* Fix bug when restarting index from lst block * Include tests for block sequencer * Check for new blocks periodically after indexing Co-authored-by: Alex Garibay <alex@alexgaribay.com> Co-authored-by: Luke Imhoff <luke.imhoff@dockyard.com>pull/162/head
parent
fa2ab3b24b
commit
117871a834
@ -1,25 +0,0 @@ |
||||
defmodule Explorer.Chain.BlockTransaction do
  @moduledoc "Connects a Block to a Transaction"

  use Explorer.Schema

  alias Explorer.Chain.{Block, Transaction}

  # Both foreign keys are mandatory on this join row.
  @required_attrs [:block_id, :transaction_id]

  @primary_key false
  schema "block_transactions" do
    belongs_to(:block, Block)
    belongs_to(:transaction, Transaction, primary_key: true)
    timestamps()
  end

  # Casts and requires both foreign keys, casts any nested block/transaction
  # params, and maps the transaction_id unique index onto the changeset so a
  # duplicate insert surfaces as a changeset error.
  def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
    struct
    |> cast(attrs, @required_attrs)
    |> validate_required(@required_attrs)
    |> cast_assoc(:block)
    |> cast_assoc(:transaction)
    |> unique_constraint(:transaction_id, name: :block_transactions_transaction_id_index)
  end
end
@ -1,21 +0,0 @@ |
||||
defmodule Explorer.Chain.FromAddress do
  @moduledoc false

  use Explorer.Schema

  alias Explorer.Chain.{Address, Transaction}

  @primary_key false
  schema "from_addresses" do
    belongs_to(:address, Address)
    belongs_to(:transaction, Transaction, primary_key: true)

    timestamps()
  end

  @doc """
  Builds a changeset for a sender-address join row.

  Only `:transaction_id` and `:address_id` are cast; at most one from-address
  row per transaction is enforced via the unique index on `transaction_id`.
  """
  # Fix: the struct argument was previously (mis)named `to_address`, a
  # copy-paste from Explorer.Chain.ToAddress.
  def changeset(%__MODULE__{} = from_address, attrs \\ %{}) do
    from_address
    |> cast(attrs, [:transaction_id, :address_id])
    |> unique_constraint(:transaction_id, name: :from_addresses_transaction_id_index)
  end
end
@ -1,20 +0,0 @@ |
||||
defmodule Explorer.Chain.ToAddress do
  @moduledoc false

  use Explorer.Schema

  alias Explorer.Chain.{Address, Transaction}

  # Only these two foreign keys are accepted from external params.
  @castable_attrs [:transaction_id, :address_id]

  @primary_key false
  schema "to_addresses" do
    belongs_to(:address, Address)
    belongs_to(:transaction, Transaction, primary_key: true)
    timestamps()
  end

  # Builds a changeset for a recipient-address join row; at most one row per
  # transaction is enforced via the to_addresses_transaction_id_index.
  def changeset(%__MODULE__{} = to_address, attrs \\ %{}) do
    to_address
    |> cast(attrs, @castable_attrs)
    |> unique_constraint(:transaction_id, name: :to_addresses_transaction_id_index)
  end
end
@ -0,0 +1,287 @@ |
||||
defmodule Explorer.ETH do
  @moduledoc """
  Ethereum JSONRPC client.

  ## Configuration

  Configuration for parity URLs can be provided with the
  following mix config:

      config :explorer, :eth_client,
        url: "https://sokol.poa.network",
        trace_url: "https://sokol-trace.poa.network",
        http: [recv_timeout: 60_000, timeout: 60_000, hackney: [pool: :eth]]

  Note: the tracing node URL is provided separately from `:url`, via
  `:trace_url`. The trace URL is used for `fetch_internal_transactions`,
  which is only a supported method on tracing nodes. The `:http` option is
  passed directly to the HTTP library (`HTTPoison`), which forwards the
  options down to `:hackney`.
  """
  require Logger

  # Dedicated hackney pool so JSONRPC traffic does not starve other HTTP use.
  def child_spec(_opts) do
    :hackney_pool.child_spec(:eth, recv_timeout: 60_000, timeout: 60_000, max_connections: 1000)
  end

  @doc """
  Creates a filter subscription that can be polled for retrieving new blocks.
  """
  def listen_for_new_blocks do
    # The request id only needs to be unique-ish; the current unix time is used.
    id = DateTime.utc_now() |> DateTime.to_unix()

    request = %{
      "id" => id,
      "jsonrpc" => "2.0",
      "method" => "eth_newBlockFilter",
      "params" => []
    }

    json_rpc(request, config(:url))
  end

  @doc """
  Lists changes for a given filter subscription.
  """
  def check_for_updates(filter_id) do
    request = %{
      "id" => filter_id,
      "jsonrpc" => "2.0",
      "method" => "eth_getFilterChanges",
      "params" => [filter_id]
    }

    json_rpc(request, config(:url))
  end

  @doc """
  Fetches blocks by block hashes.

  Transaction data is included for each block.
  """
  def fetch_blocks_by_hash(block_hashes) do
    batched_requests =
      for block_hash <- block_hashes do
        %{
          # The hash doubles as the request id so responses can be correlated.
          "id" => block_hash,
          "jsonrpc" => "2.0",
          "method" => "eth_getBlockByHash",
          "params" => [block_hash, true]
        }
      end

    json_rpc(batched_requests, config(:url))
  end

  @doc """
  Decodes a `"0x"`-prefixed hexadecimal quantity to an integer.

  Raises when the argument does not start with `"0x"`.
  """
  def decode_int(hex) do
    {"0x", base_16} = String.split_at(hex, 2)
    String.to_integer(base_16, 16)
  end

  @doc """
  Decodes a `"0x"`-prefixed hexadecimal unix timestamp to a `DateTime`.
  """
  def decode_time(field) do
    field |> decode_int() |> Timex.from_unix()
  end

  @doc """
  Fetches transaction receipts for the given transaction hashes in one batch.

  Returns `{:ok, %{hash => receipt}}` with hex quantity fields decoded, or
  `{:error, reason}`.
  """
  def fetch_transaction_receipts(hashes) when is_list(hashes) do
    hashes
    |> Enum.map(fn hash ->
      %{
        "id" => hash,
        "jsonrpc" => "2.0",
        "method" => "eth_getTransactionReceipt",
        "params" => [hash]
      }
    end)
    |> json_rpc(config(:url))
    |> handle_receipts()
  end

  # Keys the receipts by request id (the transaction hash) and decodes the
  # hex quantity fields, including per-log indexes.
  defp handle_receipts({:ok, results}) do
    results_map =
      Enum.into(results, %{}, fn %{"id" => hash, "result" => receipt} ->
        {hash,
         Map.merge(receipt, %{
           "transactionHash" => String.downcase(receipt["transactionHash"]),
           "transactionIndex" => decode_int(receipt["transactionIndex"]),
           "cumulativeGasUsed" => decode_int(receipt["cumulativeGasUsed"]),
           "gasUsed" => decode_int(receipt["gasUsed"]),
           "status" => decode_int(receipt["status"]),
           "logs" =>
             Enum.map(receipt["logs"], fn log ->
               Map.merge(log, %{"logIndex" => decode_int(log["logIndex"])})
             end)
         })}
      end)

    {:ok, results_map}
  end

  defp handle_receipts({:error, reason}) do
    {:error, reason}
  end

  @doc """
  Fetches internal transactions (traces) for the given transaction hashes.

  Requires a tracing node; the request is sent to `:trace_url`.
  """
  def fetch_internal_transactions(hashes) when is_list(hashes) do
    hashes
    |> Enum.map(fn hash ->
      %{
        "id" => hash,
        "jsonrpc" => "2.0",
        "method" => "trace_replayTransaction",
        "params" => [hash, ["trace"]]
      }
    end)
    |> json_rpc(config(:trace_url))
    |> handle_internal_transactions()
  end

  # Any per-request error aborts the whole batch via throw/catch, so a partial
  # result map is never returned.
  defp handle_internal_transactions({:ok, results}) do
    results_map =
      Enum.into(results, %{}, fn
        %{"error" => error} ->
          throw({:error, error})

        %{"id" => hash, "result" => %{"trace" => traces}} ->
          {hash, Enum.map(traces, &decode_trace(&1))}
      end)

    {:ok, results_map}
  catch
    {:error, reason} -> {:error, reason}
  end

  defp handle_internal_transactions({:error, reason}) do
    {:error, reason}
  end

  # Decodes hex quantities in a trace's "action" and, when present, its result.
  defp decode_trace(%{"action" => action} = trace) do
    trace
    |> Map.merge(%{
      "action" =>
        Map.merge(action, %{
          "value" => decode_int(action["value"]),
          "gas" => decode_int(action["gas"])
        })
    })
    |> put_gas_used()
  end

  # Errored traces carry no "result", so there is no gasUsed to decode.
  defp put_gas_used(%{"error" => _} = trace), do: trace

  defp put_gas_used(%{"result" => %{"gasUsed" => gas}} = trace) do
    put_in(trace, ["result", "gasUsed"], decode_int(gas))
  end

  @doc """
  Fetches blocks by block number range.
  """
  def fetch_blocks_by_range(block_start, block_end) do
    block_start
    |> build_batch_get_block_by_number(block_end)
    |> json_rpc(config(:url))
    |> handle_get_block_by_number(block_start, block_end)
  end

  defp build_batch_get_block_by_number(block_start, block_end) do
    for current <- block_start..block_end do
      %{
        "id" => current,
        "jsonrpc" => "2.0",
        "method" => "eth_getBlockByNumber",
        "params" => [int_to_hash_string(current), true]
      }
    end
  end

  # A nil result means the requested number is past the chain head, which
  # flags the range as :end_of_chain instead of :more.
  defp handle_get_block_by_number({:ok, results}, block_start, block_end) do
    {blocks, next} =
      Enum.reduce(results, {[], :more}, fn
        %{"result" => nil}, {blocks, _} -> {blocks, :end_of_chain}
        %{"result" => %{} = block}, {blocks, next} -> {[block | blocks], next}
      end)

    {:ok, next, decode_blocks(blocks), {block_start, block_end}}
  end

  defp handle_get_block_by_number({:error, reason}, block_start, block_end) do
    {:error, reason, {block_start, block_end}}
  end

  # Normalizes hash casing and decodes hex quantity fields for each block.
  defp decode_blocks(blocks) do
    Enum.map(blocks, fn block ->
      Map.merge(block, %{
        "hash" => String.downcase(block["hash"]),
        "number" => decode_int(block["number"]),
        "gasUsed" => decode_int(block["gasUsed"]),
        "timestamp" => decode_time(block["timestamp"]),
        "difficulty" => decode_int(block["difficulty"]),
        "totalDifficulty" => decode_int(block["totalDifficulty"]),
        "size" => decode_int(block["size"]),
        "gasLimit" => decode_int(block["gasLimit"]),
        "transactions" => decode_transactions(block["transactions"])
      })
    end)
  end

  defp decode_transactions(transactions) do
    Enum.map(transactions, fn transaction ->
      Map.merge(transaction, %{
        "hash" => String.downcase(transaction["hash"]),
        "value" => decode_int(transaction["value"]),
        "gas" => decode_int(transaction["gas"]),
        "gasPrice" => decode_int(transaction["gasPrice"]),
        "nonce" => decode_int(transaction["nonce"])
      })
    end)
  end

  # POSTs a single request map or a batch (list of maps) to `url`.
  defp json_rpc(payload, url) do
    json = encode_json(payload)
    headers = [{"Content-Type", "application/json"}]

    case HTTPoison.post(url, json, headers, config(:http)) do
      {:ok, %HTTPoison.Response{body: body, status_code: code}} ->
        body |> decode_json(payload) |> handle_response(code)

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, reason}
    end
  end

  # Batch responses arrive as a list of result maps and are returned whole;
  # single responses are unwrapped to their "result" or "error".
  defp handle_response(resp, 200) do
    case resp do
      [%{} | _] = batch_resp -> {:ok, batch_resp}
      %{"error" => error} -> {:error, error}
      %{"result" => result} -> {:ok, result}
    end
  end

  defp handle_response(resp, _status) do
    {:error, resp}
  end

  defp config(key) do
    :explorer
    |> Application.fetch_env!(:eth_client)
    |> Keyword.fetch!(key)
  end

  defp encode_json(data), do: Jason.encode_to_iodata!(data)

  defp decode_json(body, posted_payload) do
    Jason.decode!(body)
  rescue
    Jason.DecodeError ->
      # Log both the body and the request that produced it for debugging,
      # then raise with a descriptive message (was the unhelpful "bad jason").
      Logger.error("""
      failed to decode json payload:

      #{inspect(body)}

      #{inspect(posted_payload)}

      """)

      raise("failed to decode JSON response")
  end

  defp int_to_hash_string(number), do: "0x" <> Integer.to_string(number, 16)
end
@ -1,5 +0,0 @@ |
||||
defmodule Explorer.ExqNodeIdentifier do
  @moduledoc "Configure Exq with the current dyno name"

  @behaviour Exq.NodeIdentifier.Behaviour

  # Uses the DYNO environment variable (set by Heroku) as the Exq node id;
  # returns nil when the variable is not set.
  def node_id do
    System.get_env("DYNO")
  end
end
@ -1,17 +0,0 @@ |
||||
defmodule Explorer.BalanceImporter do
  @moduledoc "Imports a balance for a given address."

  alias Explorer.{Chain, Ethereum}

  # Downloads the encoded balance for `hash` and persists the decoded value.
  def import(hash) do
    hash
    |> Ethereum.download_balance()
    |> persist_balance(hash)
  end

  # Decodes the hex-encoded balance and writes it to the address record.
  defp persist_balance(encoded_balance, hash) when is_binary(hash) do
    Chain.update_balance(hash, Ethereum.decode_integer_field(encoded_balance))
  end
end
@ -1,81 +0,0 @@ |
||||
defmodule Explorer.BlockImporter do
  @moduledoc "Imports a block."

  import Ecto.Query
  import Ethereumex.HttpClient, only: [eth_get_block_by_number: 2]

  alias Explorer.{BlockImporter, Ethereum}
  alias Explorer.Chain.Block
  alias Explorer.Repo.NewRelic, as: Repo
  alias Explorer.Workers.ImportTransaction

  # Imports an already-downloaded block map: inserts the block when its hash
  # is not yet persisted, then imports each of its transactions inline.
  def import(raw_block) when is_map(raw_block) do
    changes = extract_block(raw_block)
    block = changes.hash |> find()

    # find/1 returns an empty %Block{} (nil id) when the hash is unknown, so
    # a nil id means the block still needs to be inserted.
    if is_nil(block.id), do: block |> Block.changeset(changes) |> Repo.insert()

    Enum.map(raw_block["transactions"], &ImportTransaction.perform/1)
  end

  @dialyzer {:nowarn_function, import: 1}
  # The "pending" block is not persisted; its transactions are queued for
  # asynchronous import instead of being imported inline.
  def import("pending") do
    raw_block = download_block("pending")
    Enum.map(raw_block["transactions"], &ImportTransaction.perform_later/1)
  end

  @dialyzer {:nowarn_function, import: 1}
  # Any other argument is treated as a block number: download the block, then
  # reuse the map-based clause above.
  def import(block_number) do
    block_number |> download_block() |> BlockImporter.import()
  end

  # Case-insensitive lookup by block hash. Returns an empty %Block{} when no
  # match exists so callers can test `block.id` for presence.
  def find(hash) do
    query =
      from(
        b in Block,
        where: fragment("lower(?)", b.hash) == ^String.downcase(hash),
        limit: 1
      )

    query |> Repo.one() || %Block{}
  end

  @dialyzer {:nowarn_function, download_block: 1}
  # Fetches a block (with full transaction objects, hence `true`) over
  # JSONRPC; the {:ok, block} match raises on a failed fetch.
  def download_block(block_number) do
    {:ok, block} =
      block_number
      |> encode_number()
      |> eth_get_block_by_number(true)

    block
  end

  # Maps the raw JSONRPC block payload onto Block schema fields, decoding
  # hex-encoded quantity/time fields. A missing nonce defaults to "0".
  def extract_block(raw_block) do
    %{
      hash: raw_block["hash"],
      number: raw_block["number"] |> Ethereum.decode_integer_field(),
      gas_used: raw_block["gasUsed"] |> Ethereum.decode_integer_field(),
      timestamp: raw_block["timestamp"] |> Ethereum.decode_time_field(),
      parent_hash: raw_block["parentHash"],
      miner: raw_block["miner"],
      difficulty: raw_block["difficulty"] |> Ethereum.decode_integer_field(),
      total_difficulty: raw_block["totalDifficulty"] |> Ethereum.decode_integer_field(),
      size: raw_block["size"] |> Ethereum.decode_integer_field(),
      gas_limit: raw_block["gasLimit"] |> Ethereum.decode_integer_field(),
      nonce: raw_block["nonce"] || "0"
    }
  end

  # Named tags pass through to the JSONRPC client unchanged.
  defp encode_number("latest"), do: "latest"
  defp encode_number("earliest"), do: "earliest"
  defp encode_number("pending"), do: "pending"
  # NOTE(review): this strips the "0x" prefix — presumably the Ethereumex
  # client re-adds it; confirm against Ethereumex.HttpClient.
  defp encode_number("0x" <> number) when is_binary(number), do: number

  # Numeric strings are parsed and re-dispatched through the integer clause.
  defp encode_number(number) when is_binary(number) do
    number
    |> String.to_integer()
    |> encode_number()
  end

  # Integers are encoded as "0x"-prefixed hexadecimal.
  defp encode_number(number), do: "0x" <> Integer.to_string(number, 16)
end
@ -1,80 +0,0 @@ |
||||
defmodule Explorer.InternalTransactionImporter do
  @moduledoc "Imports a transaction's internal transactions given its hash."

  import Ecto.Query

  alias Explorer.{Chain, Ethereum, EthereumexExtensions, Repo}
  alias Explorer.Chain.{InternalTransaction, Transaction}

  @dialyzer {:nowarn_function, import: 1}
  # Looks up the persisted transaction, downloads its trace, and persists one
  # InternalTransaction row per trace entry.
  def import(hash) do
    transaction = find_transaction(hash)

    hash
    |> download_trace
    |> extract_attrs
    |> persist_internal_transactions(transaction)
  end

  @dialyzer {:nowarn_function, download_trace: 1}
  # Fetches the trace via the extended JSONRPC client (tracing node required).
  defp download_trace(hash) do
    EthereumexExtensions.trace_transaction(hash)
  end

  # Case-insensitive lookup; Repo.one! raises when the transaction has not
  # been imported yet, aborting the import.
  defp find_transaction(hash) do
    query =
      from(
        t in Transaction,
        where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
        limit: 1
      )

    Repo.one!(query)
  end

  @dialyzer {:nowarn_function, extract_attrs: 1}
  # Maps each entry of the payload's "trace" list to InternalTransaction
  # attrs, keeping the entry's list position as its index.
  defp extract_attrs(attrs) do
    trace = attrs["trace"]
    trace |> Enum.with_index() |> Enum.map(&extract_trace/1)
  end

  def extract_trace({trace, index}) do
    %{
      index: index,
      # Falls back to the trace's top-level "type" when the action carries no
      # callType.
      call_type: trace["action"]["callType"] || trace["type"],
      to_address_id: trace |> to_address() |> address_id(),
      from_address_id: trace |> from_address() |> address_id(),
      trace_address: trace["traceAddress"],
      value: trace["action"]["value"] |> Ethereum.decode_integer_field(),
      gas: trace["action"]["gas"] |> Ethereum.decode_integer_field(),
      gas_used: trace["result"]["gasUsed"] |> Ethereum.decode_integer_field(),
      input: trace["action"]["input"],
      output: trace["result"]["output"]
    }
  end

  # The target address normally lives under action.to; when absent, the
  # result.address is used instead — presumably for contract creations,
  # confirm against the tracing node's payload format.
  defp to_address(%{"action" => %{"to" => address}})
       when not is_nil(address),
       do: address

  defp to_address(%{"result" => %{"address" => address}}), do: address

  defp from_address(%{"action" => %{"from" => address}}), do: address

  @dialyzer {:nowarn_function, persist_internal_transactions: 2}
  # Inserts each trace row linked to the parent transaction. Individual
  # Repo.insert/1 results are returned to the caller, not checked here.
  defp persist_internal_transactions(traces, transaction) do
    Enum.map(traces, fn trace ->
      trace = Map.merge(trace, %{transaction_id: transaction.id})

      %InternalTransaction{}
      |> InternalTransaction.changeset(trace)
      |> Repo.insert()
    end)
  end

  # Ensures an Address row exists for `hash` and returns its id; the match
  # raises if the address cannot be ensured.
  defp address_id(hash) do
    {:ok, address} = Chain.ensure_hash_address(hash)

    address.id
  end
end
@ -1,79 +0,0 @@ |
||||
defmodule Explorer.ReceiptImporter do
  @moduledoc "Imports a transaction receipt given a transaction hash."

  import Ecto.Query
  import Ethereumex.HttpClient, only: [eth_get_transaction_receipt: 1]

  alias Explorer.{Chain, Repo}
  alias Explorer.Chain.{Receipt, Transaction}

  @doc """
  Downloads the receipt for `hash` and persists it, linked to its transaction.

  When the transaction is unknown (or already has a receipt), the downloaded
  receipt is silently discarded.
  """
  def import(hash) do
    transaction = hash |> find_transaction()

    hash
    |> download_receipt()
    |> extract_receipt()
    |> Map.put(:transaction_id, transaction.id)
    |> save_receipt()
  end

  @dialyzer {:nowarn_function, download_receipt: 1}
  # Fetches the receipt over JSONRPC; nodes return nil for unknown or pending
  # transactions, which is normalized to an empty map.
  defp download_receipt(hash) do
    {:ok, receipt} = eth_get_transaction_receipt(hash)
    receipt || %{}
  end

  # Finds the transaction for `hash` only if it has no receipt yet.
  # Fix: the argument is now downcased before comparison against the lowered
  # stored hash, matching every sibling importer — previously a mixed-case
  # hash would never match and the receipt was silently dropped.
  defp find_transaction(hash) do
    query =
      from(
        transaction in Transaction,
        left_join: receipt in assoc(transaction, :receipt),
        where: fragment("lower(?)", transaction.hash) == ^String.downcase(hash),
        where: is_nil(receipt.id),
        limit: 1
      )

    Repo.one(query) || Transaction.null()
  end

  # A nil transaction_id means find_transaction/1 fell back to the null
  # transaction; skip the insert in that case.
  defp save_receipt(receipt) do
    if receipt.transaction_id != nil do
      %Receipt{}
      |> Receipt.changeset(receipt)
      |> Repo.insert()
    end
  end

  # Maps the raw receipt payload onto Receipt attrs, decoding hex quantities
  # and extracting each log entry.
  defp extract_receipt(receipt) do
    logs = receipt["logs"] || []

    %{
      index: receipt["transactionIndex"] |> decode_integer_field(),
      cumulative_gas_used: receipt["cumulativeGasUsed"] |> decode_integer_field(),
      gas_used: receipt["gasUsed"] |> decode_integer_field(),
      status: receipt["status"] |> decode_integer_field(),
      logs: logs |> Enum.map(&extract_log/1)
    }
  end

  # Builds log attrs, ensuring the emitting address row exists first; the
  # {:ok, address} match raises when the address cannot be ensured.
  defp extract_log(log) do
    {:ok, address} = Chain.ensure_hash_address(log["address"])

    %{
      address_id: address.id,
      index: log["logIndex"] |> decode_integer_field(),
      data: log["data"],
      type: log["type"],
      first_topic: log["topics"] |> Enum.at(0),
      second_topic: log["topics"] |> Enum.at(1),
      third_topic: log["topics"] |> Enum.at(2),
      fourth_topic: log["topics"] |> Enum.at(3)
    }
  end

  # "0x…" quantities decode to integers; any other value passes through.
  defp decode_integer_field("0x" <> hex) when is_binary(hex) do
    String.to_integer(hex, 16)
  end

  defp decode_integer_field(field), do: field
end
@ -1,142 +0,0 @@ |
||||
defmodule Explorer.TransactionImporter do
  @moduledoc "Imports a transaction given a unique hash."

  import Ecto.Query
  import Ethereumex.HttpClient, only: [eth_get_transaction_by_hash: 1]

  alias Explorer.{Chain, Ethereum, Repo, BalanceImporter}
  alias Explorer.Chain.{Block, BlockTransaction, Transaction}

  # Entry point for a bare transaction hash: download the payload first.
  def import(hash) when is_binary(hash) do
    hash |> download_transaction() |> persist_transaction()
  end

  # Entry point for an already-downloaded JSONRPC transaction map.
  def import(raw_transaction) when is_map(raw_transaction) do
    persist_transaction(raw_transaction)
  end

  # Inserts the transaction unless it already exists, links it to its block,
  # and refreshes the balances of both involved addresses.
  def persist_transaction(raw_transaction) do
    found_transaction = raw_transaction["hash"] |> find()

    transaction =
      case is_nil(found_transaction.id) do
        false ->
          # Already persisted; reuse the existing record.
          found_transaction

        true ->
          # Ensure both address rows exist before inserting the transaction,
          # so the foreign keys can be set in the changeset.
          to_address =
            raw_transaction
            |> to_address()
            |> fetch_address()

          from_address =
            raw_transaction
            |> from_address()
            |> fetch_address()

          changes =
            raw_transaction
            |> extract_attrs()
            |> Map.put(:to_address_id, to_address.id)
            |> Map.put(:from_address_id, from_address.id)

          found_transaction |> Transaction.changeset(changes) |> Repo.insert!()
      end

    transaction
    |> create_block_transaction(raw_transaction["blockHash"])

    refresh_account_balances(raw_transaction)

    transaction
  end

  # Case-insensitive lookup by hash; returns an empty %Transaction{} (nil id)
  # when the hash is unknown.
  def find(hash) do
    query =
      from(
        t in Transaction,
        where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
        limit: 1
      )

    query |> Repo.one() || %Transaction{}
  end

  # Fetches the raw transaction payload over JSONRPC; the {:ok, _} match
  # raises on a failed fetch.
  def download_transaction(hash) do
    {:ok, payload} = eth_get_transaction_by_hash(hash)
    payload
  end

  # Maps the raw JSONRPC payload onto Transaction schema fields, decoding
  # hex-encoded quantity fields.
  def extract_attrs(raw_transaction) do
    %{
      hash: raw_transaction["hash"],
      value: raw_transaction["value"] |> Ethereum.decode_integer_field(),
      gas: raw_transaction["gas"] |> Ethereum.decode_integer_field(),
      gas_price: raw_transaction["gasPrice"] |> Ethereum.decode_integer_field(),
      input: raw_transaction["input"],
      nonce: raw_transaction["nonce"] |> Ethereum.decode_integer_field(),
      public_key: raw_transaction["publicKey"],
      r: raw_transaction["r"],
      s: raw_transaction["s"],
      standard_v: raw_transaction["standardV"],
      transaction_index: raw_transaction["transactionIndex"],
      v: raw_transaction["v"]
    }
  end

  # Creates (or re-points) the join row between the transaction and the block
  # identified by `hash`. Returns the transaction for pipelining.
  # NOTE(review): String.downcase/1 assumes a non-nil block hash — a pending
  # transaction (nil blockHash) would crash here; confirm callers.
  def create_block_transaction(transaction, hash) do
    query =
      from(
        t in Block,
        where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
        limit: 1
      )

    block = query |> Repo.one()

    if block do
      changes = %{block_id: block.id, transaction_id: transaction.id}

      case Repo.get_by(BlockTransaction, transaction_id: transaction.id) do
        nil ->
          %BlockTransaction{}
          |> BlockTransaction.changeset(changes)
          |> Repo.insert()

        block_transaction ->
          # Join row already exists; only the block reference is updated.
          block_transaction
          |> BlockTransaction.changeset(%{block_id: block.id})
          |> Repo.update()
      end
    end

    transaction
  end

  # Recipient resolution: regular calls carry "to"; "creates" is used when
  # present and non-nil; a bare hash passes through unchanged.
  def to_address(%{"to" => to}) when not is_nil(to), do: to
  def to_address(%{"creates" => creates}) when not is_nil(creates), do: creates
  def to_address(hash) when is_bitstring(hash), do: hash

  def from_address(%{"from" => from}), do: from
  def from_address(hash) when is_bitstring(hash), do: hash

  # Ensures an Address row exists for `hash` and returns it; the match raises
  # if the address cannot be ensured.
  def fetch_address(hash) when is_bitstring(hash) do
    {:ok, address} = Chain.ensure_hash_address(hash)

    address
  end

  # Re-imports balances for both the recipient and the sender.
  defp refresh_account_balances(raw_transaction) do
    raw_transaction
    |> to_address()
    |> update_balance()

    raw_transaction
    |> from_address()
    |> update_balance()
  end

  defp update_balance(address_hash) do
    BalanceImporter.import(address_hash)
  end
end
@ -0,0 +1,38 @@ |
||||
defmodule Explorer.Indexer do
  @moduledoc """
  Top-level entry point for the chain indexer; startup is delegated to
  `Explorer.Indexer.Supervisor`.
  """

  alias Explorer.Chain
  alias Explorer.Chain.Block

  @doc """
  Child spec that starts `Explorer.Indexer.Supervisor` as a permanent
  supervisor under the caller's supervision tree.
  """
  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {Explorer.Indexer.Supervisor, :start_link, [opts]},
      restart: :permanent,
      shutdown: 5000,
      type: :supervisor
    }
  end

  @doc """
  Returns the number of the latest persisted block, or 0 when nothing has
  been indexed yet.
  """
  def last_indexed_block_number do
    case Chain.get_latest_block() do
      %Block{number: number} -> number
      nil -> 0
    end
  end

  @doc """
  Returns the block number the indexer should fetch next: 0 when nothing is
  indexed, otherwise one past the last indexed block.
  """
  def next_block_number do
    last = last_indexed_block_number()

    if last == 0, do: 0, else: last + 1
  end
end
@ -0,0 +1,212 @@ |
||||
defmodule Explorer.Indexer.BlockFetcher do
  @moduledoc """
  Fetches blocks (with transactions), their receipts, and their internal
  transactions, and imports them via `Explorer.Chain.import_blocks/3`.

  Catch-up indexing runs in a monitored task; once it finishes, the server
  installs a new-block filter and polls it for updates.

  ## Next steps

  - after genesis index transition to RT index
  """
  use GenServer

  require Logger

  alias Explorer.{Chain, ETH, Indexer}
  alias Explorer.Indexer.Sequence

  defstruct ~w(current_block genesis_task subscription_id)a

  # Block numbers fetched per JSONRPC batch, and concurrent block batches.
  @batch_size 50
  @blocks_concurrency 10

  @receipts_batch_size 250
  @receipts_concurrency 10

  @internal_batch_size 50
  @internal_concurrency 4

  # How often the installed new-block filter is polled, in milliseconds.
  @polling_interval 20_000

  @doc """
  Ensures missing block number ranges are chunked into fetchable batches.

  Ranges wider than the batch size are split into consecutive sub-ranges of
  at most `@batch_size` blocks; the reported count passes through unchanged.
  """
  def missing_block_numbers do
    {count, missing_ranges} = Chain.missing_block_numbers()

    chunked_ranges =
      Enum.flat_map(missing_ranges, fn
        {start, ending} when ending - start <= @batch_size ->
          [{start, ending}]

        {start, ending} ->
          start
          |> Stream.iterate(&(&1 + @batch_size))
          |> Enum.reduce_while([], fn
            chunk_start, acc when chunk_start + @batch_size >= ending ->
              {:halt, [{chunk_start, ending} | acc]}

            chunk_start, acc ->
              {:cont, [{chunk_start, chunk_start + @batch_size - 1} | acc]}
          end)
          |> Enum.reverse()
      end)

    {count, chunked_ranges}
  end

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  def init(_opts) do
    # Kick off catch-up indexing immediately and log counts periodically.
    send(self(), :index)
    :timer.send_interval(15_000, self(), :debug_count)

    {:ok, %__MODULE__{current_block: 0, genesis_task: nil, subscription_id: nil}}
  end

  # Starts a monitored task that back-fills all missing ranges up to the
  # next block number.
  def handle_info(:index, state) do
    {count, missing_ranges} = missing_block_numbers()
    current_block = Indexer.next_block_number()

    Logger.debug(fn -> "#{count} missed block ranges between genesis and #{current_block}" end)

    {:ok, genesis_task} =
      Task.start_link(fn ->
        stream_import(missing_ranges, current_block)
      end)

    Process.monitor(genesis_task)

    {:noreply, %__MODULE__{state | genesis_task: genesis_task}}
  end

  # Polls the new-block filter; re-schedules itself regardless of outcome.
  def handle_info(:poll, %__MODULE__{subscription_id: subscription_id} = state) do
    Process.send_after(self(), :poll, @polling_interval)

    with {:ok, blocks} when length(blocks) > 0 <- ETH.check_for_updates(subscription_id) do
      Logger.debug(fn -> "Processing #{length(blocks)} new block(s)" end)

      # TODO do something with the new blocks
      ETH.fetch_blocks_by_hash(blocks)
    end

    {:noreply, state}
  end

  # Genesis catch-up finished: switch to filter-based polling for new blocks.
  def handle_info({:DOWN, _ref, :process, pid, :normal}, %__MODULE__{genesis_task: pid} = state) do
    Logger.info(fn -> "Finished index from genesis" end)

    {:ok, subscription_id} = ETH.listen_for_new_blocks()

    send(self(), :poll)

    {:noreply, %__MODULE__{state | genesis_task: nil, subscription_id: subscription_id}}
  end

  def handle_info(:debug_count, state) do
    Logger.debug(fn ->
      """

      ================================
      persisted counts
      ================================
      blocks: #{Chain.block_count()}
      internal transactions: #{Chain.internal_transaction_count()}
      receipts: #{Chain.receipt_count()}
      logs: #{Chain.log_count()}
      """
    end)

    {:noreply, state}
  end

  # Streams ranges from the sequence with bounded concurrency; a failed range
  # is re-injected into the sequence for retry.
  defp stream_import(missing_ranges, current_block) do
    {:ok, seq} = Sequence.start_link(missing_ranges, current_block, @batch_size)

    seq
    |> Sequence.build_stream()
    |> Task.async_stream(
      fn {block_start, block_end} = range ->
        with {:ok, next, blocks, range} <- ETH.fetch_blocks_by_range(block_start, block_end),
             :ok <- cap_seq(seq, next, range),
             transaction_hashes <- collect_transaction_hashes(blocks),
             {:ok, receipts} <- fetch_transaction_receipts(transaction_hashes),
             {:ok, internals} <- fetch_internal_transactions(transaction_hashes) do
          import_blocks(blocks, internals, receipts, seq, range)
        else
          {:error, reason} ->
            Logger.debug(fn ->
              "failed to fetch blocks #{inspect(range)}: #{inspect(reason)}. Retrying"
            end)

            :ok = Sequence.inject_range(seq, range)
        end
      end,
      max_concurrency: @blocks_concurrency,
      timeout: :infinity
    )
    |> Enum.each(fn {:ok, :ok} -> :ok end)
  end

  # Reaching past the chain head caps the sequence so streaming terminates.
  defp cap_seq(seq, :end_of_chain, {_block_start, block_end}) do
    Logger.info("Reached end of blockchain #{inspect(block_end)}")
    :ok = Sequence.cap(seq)
  end

  defp cap_seq(_seq, :more, {block_start, block_end}) do
    Logger.debug(fn -> "got blocks #{block_start} - #{block_end}" end)
    :ok
  end

  defp fetch_transaction_receipts([]), do: {:ok, %{}}

  # Fetches receipts in concurrent batches, merging per-batch maps and
  # halting on the first error.
  # Fix: the capture was garbled to `Ð.fetch_transaction_receipts(&1)` — the
  # HTML entity rendering of `&ETH` — and is restored to a proper capture.
  defp fetch_transaction_receipts(hashes) do
    Logger.debug(fn -> "fetching #{length(hashes)} transaction receipts" end)
    stream_opts = [max_concurrency: @receipts_concurrency, timeout: :infinity]

    hashes
    |> Enum.chunk_every(@receipts_batch_size)
    |> Task.async_stream(&ETH.fetch_transaction_receipts(&1), stream_opts)
    |> Enum.reduce_while({:ok, %{}}, fn
      {:ok, {:ok, receipts}}, {:ok, acc} -> {:cont, {:ok, Map.merge(acc, receipts)}}
      {:ok, {:error, reason}}, {:ok, _acc} -> {:halt, {:error, reason}}
      {:error, reason}, {:ok, _acc} -> {:halt, {:error, reason}}
    end)
  end

  defp fetch_internal_transactions([]), do: {:ok, %{}}

  # Same batching strategy as receipts, against the tracing node.
  # Fix: same `&ETH` capture restoration as above.
  defp fetch_internal_transactions(hashes) do
    Logger.debug(fn -> "fetching #{length(hashes)} internal transactions" end)
    stream_opts = [max_concurrency: @internal_concurrency, timeout: :infinity]

    hashes
    |> Enum.chunk_every(@internal_batch_size)
    |> Task.async_stream(&ETH.fetch_internal_transactions(&1), stream_opts)
    |> Enum.reduce_while({:ok, %{}}, fn
      {:ok, {:ok, trans}}, {:ok, acc} -> {:cont, {:ok, Map.merge(acc, trans)}}
      {:ok, {:error, reason}}, {:ok, _acc} -> {:halt, {:error, reason}}
      {:error, reason}, {:ok, _acc} -> {:halt, {:error, reason}}
    end)
  end

  # Persists a fetched range; on failure the range is re-injected for retry.
  defp import_blocks(blocks, internal_transactions, receipts, seq, range) do
    case Chain.import_blocks(blocks, internal_transactions, receipts) do
      {:ok, _results} ->
        :ok

      {:error, step, reason, _changes} ->
        Logger.debug(fn ->
          "failed to insert blocks during #{step} #{inspect(range)}: #{inspect(reason)}. Retrying"
        end)

        :ok = Sequence.inject_range(seq, range)
    end
  end

  defp collect_transaction_hashes(raw_blocks) do
    Enum.flat_map(raw_blocks, fn %{"transactions" => transactions} ->
      Enum.map(transactions, fn %{"hash" => hash} -> hash end)
    end)
  end
end
@ -0,0 +1,80 @@ |
||||
defmodule Explorer.Indexer.Sequence do
  @moduledoc false

  use Agent

  # current: next block number to generate from in :infinite mode
  # mode:    :infinite (generate new ranges when queue empties) | :finite (halt)
  # queue:   FIFO of explicitly injected {first, last} ranges, served first
  # step:    how many block numbers each generated range covers
  defstruct ~w(current mode queue step)a

  # An inclusive {first, last} range of block numbers.
  @type range :: {pos_integer(), pos_integer()}

  @doc """
  Starts a process for managing a block sequence.

  `initial_ranges` are served first (FIFO). Once they are exhausted, the
  sequence generates fresh ranges of `step` numbers starting at `range_start`,
  until `cap/1` switches it to finite mode.
  """
  @spec start_link([range()], pos_integer(), pos_integer()) :: Agent.on_start()
  def start_link(initial_ranges, range_start, step) do
    Agent.start_link(fn ->
      %__MODULE__{
        current: range_start,
        step: step,
        mode: :infinite,
        queue: :queue.from_list(initial_ranges)
      }
    end)
  end

  @doc """
  Adds a range of block numbers to the back of the sequence's queue
  (used to retry ranges that failed to import).
  """
  @spec inject_range(pid(), range()) :: :ok
  def inject_range(sequencer, {_first, _last} = range) when is_pid(sequencer) do
    Agent.update(sequencer, fn state ->
      %__MODULE__{state | queue: :queue.in(range, state.queue)}
    end)
  end

  @doc """
  Switches the sequencer to `:finite` mode: once the queued ranges are
  exhausted, `pop/1` returns `:halt` instead of generating new ranges.
  """
  @spec cap(pid()) :: :ok
  def cap(sequencer) when is_pid(sequencer) do
    Agent.update(sequencer, fn state ->
      %__MODULE__{state | mode: :finite}
    end)
  end

  @doc """
  Builds an enumerable stream of ranges backed by a sequencer agent.
  The stream terminates when `pop/1` returns `:halt`.
  """
  @spec build_stream(pid()) :: Enumerable.t()
  def build_stream(sequencer) when is_pid(sequencer) do
    Stream.resource(
      fn -> sequencer end,
      fn seq ->
        case pop(seq) do
          :halt -> {:halt, seq}
          range -> {[range], seq}
        end
      end,
      fn seq -> seq end
    )
  end

  @doc """
  Pops the next block range from the sequence.

  Queued ranges are returned first. With an empty queue, `:infinite` mode
  generates the next `step`-sized range from `current`; `:finite` mode
  returns `:halt`.
  """
  @spec pop(pid()) :: range() | :halt
  def pop(sequencer) when is_pid(sequencer) do
    Agent.get_and_update(sequencer, fn %__MODULE__{current: current, step: step} = state ->
      case {state.mode, :queue.out(state.queue)} do
        {_, {{:value, {starting, ending}}, new_queue}} ->
          {{starting, ending}, %__MODULE__{state | queue: new_queue}}

        {:infinite, {:empty, new_queue}} ->
          {{current, current + step - 1}, %__MODULE__{state | current: current + step, queue: new_queue}}

        {:finite, {:empty, new_queue}} ->
          {:halt, %__MODULE__{state | queue: new_queue}}
      end
    end)
  end
end
@ -0,0 +1,19 @@ |
||||
defmodule Explorer.Indexer.Supervisor do
  @moduledoc """
  Supervising the fetchers for the `Explorer.Indexer`
  """

  # The original defined `init/1` without declaring the Supervisor behaviour;
  # `use Supervisor` adds the behaviour declaration and a `child_spec/1`.
  use Supervisor

  alias Explorer.Indexer.BlockFetcher

  @doc "Starts the supervisor. `opts` are passed through to `init/1`."
  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts)
  end

  @impl Supervisor
  def init(_opts) do
    children = [
      {BlockFetcher, []}
    ]

    # Fetchers are independent; restart only the one that crashes.
    Supervisor.init(children, strategy: :one_for_one)
  end
end
@ -1,4 +0,0 @@ |
||||
defmodule Explorer.Scheduler do
  @moduledoc false

  # Cron-style job scheduler; job definitions live in the :explorer app config.
  use Quantum.Scheduler, otp_app: :explorer
end
@ -1,21 +0,0 @@ |
||||
defmodule Explorer.SkippedBalances do
  @moduledoc "Gets a list of Addresses that do not have balances."

  alias Explorer.Chain.Address
  alias Explorer.Repo.NewRelic, as: Repo

  import Ecto.Query, only: [from: 2]

  @doc "Fetches up to `count` address hashes whose balance has not been loaded."
  def fetch(count) do
    Repo.all(
      from(
        address in Address,
        select: address.hash,
        where: is_nil(address.balance),
        limit: ^count
      )
    )
  end
end
@ -1,32 +0,0 @@ |
||||
defmodule Explorer.SkippedBlocks do
  @moduledoc """
  Fill in older blocks that were skipped during processing.
  """
  import Ecto.Query, only: [from: 2, limit: 2]

  alias Explorer.Chain.Block
  alias Explorer.Repo.NewRelic, as: Repo

  # Generates every block number from the latest down to 0 on the SQL side.
  @missing_number_query "SELECT generate_series(?, 0, -1) AS missing_number"

  @doc "Returns at most one skipped block number."
  def first, do: first(1)

  @doc "Returns up to `count` block numbers (as strings) that have no stored block."
  def first(count) do
    query =
      from(
        block in Block,
        right_join: fragment(@missing_number_query, ^latest_block_number()),
        on: block.number == fragment("missing_number"),
        select: fragment("missing_number::text"),
        where: is_nil(block.id),
        limit: ^count
      )

    Repo.all(query)
  end

  @doc "The highest stored block number, falling back to the null block when empty."
  def latest_block_number do
    latest = Repo.one(Block |> Block.latest() |> limit(1)) || Block.null()
    latest.number
  end
end
@ -1,25 +0,0 @@ |
||||
defmodule Explorer.SkippedInternalTransactions do
  @moduledoc """
  Find transactions that do not have internal transactions.
  """
  import Ecto.Query, only: [from: 2]

  alias Explorer.Chain.Transaction
  alias Explorer.Repo.NewRelic, as: Repo

  @doc "Returns at most one transaction hash lacking internal transactions."
  def first, do: first(1)

  @doc "Returns up to `count` hashes of transactions with zero internal transactions."
  def first(count) do
    query =
      from(
        t in Transaction,
        left_join: internal in assoc(t, :internal_transactions),
        select: fragment("hash"),
        group_by: t.id,
        having: count(internal.id) == 0,
        limit: ^count
      )

    Repo.all(query)
  end
end
@ -1,25 +0,0 @@ |
||||
defmodule Explorer.SkippedReceipts do
  @moduledoc """
  Find transactions that do not have a receipt.
  """
  import Ecto.Query, only: [from: 2]

  alias Explorer.Chain.Transaction
  alias Explorer.Repo.NewRelic, as: Repo

  @doc "Returns at most one transaction hash lacking a receipt."
  def first, do: first(1)

  @doc "Returns up to `count` hashes of transactions that have no receipt."
  def first(count) do
    query =
      from(
        t in Transaction,
        left_join: receipt in assoc(t, :receipt),
        select: fragment("hash"),
        group_by: t.id,
        having: count(receipt.id) == 0,
        limit: ^count
      )

    Repo.all(query)
  end
end
@ -1,13 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportBalance do
  @moduledoc "A worker that imports the balance for a given address."

  alias Explorer.BalanceImporter

  @doc "Synchronously imports the balance for `address_hash`."
  def perform(address_hash) do
    BalanceImporter.import(address_hash)
  end

  @doc ~s(Enqueues a balance import on the "balances" queue.)
  def perform_later(address_hash) do
    Exq.enqueue(Exq.Enqueuer, "balances", __MODULE__, [address_hash])
  end
end
@ -1,26 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportBlock do
  @moduledoc "Imports blocks by web3 conventions."

  import Ethereumex.HttpClient, only: [eth_block_number: 0]

  alias Explorer.BlockImporter

  # One attribute covers both perform/1 clauses (the original repeated it).
  @dialyzer {:nowarn_function, perform: 1}

  @doc """
  Imports the block identified by `number`.

  `"latest"` resolves the current chain head and re-enqueues it by number;
  resolution failures are ignored (returns `nil`).
  """
  def perform("latest") do
    case eth_block_number() do
      {:ok, number} -> perform_later(number)
      _ -> nil
    end
  end

  def perform(number), do: BlockImporter.import("#{number}")

  @doc ~s(Enqueues a block import on the "blocks" queue; hex "0x..." numbers are decoded first.)
  # The "0x" <> number match already guarantees a binary, so the original
  # `when is_binary(number)` guard was redundant and has been dropped.
  def perform_later("0x" <> number) do
    number |> String.to_integer(16) |> perform_later()
  end

  def perform_later(number) do
    Exq.enqueue(Exq.Enqueuer, "blocks", __MODULE__, [number])
  end
end
@ -1,12 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportInternalTransaction do
  @moduledoc "Imports internal transactions via Parity trace endpoints."

  alias Explorer.InternalTransactionImporter

  @dialyzer {:nowarn_function, perform: 1}
  @doc "Synchronously imports internal transactions for `transaction_hash`."
  def perform(transaction_hash) do
    InternalTransactionImporter.import(transaction_hash)
  end

  @doc ~s(Enqueues the import on the "internal_transactions" queue.)
  def perform_later(transaction_hash) do
    Exq.enqueue(Exq.Enqueuer, "internal_transactions", __MODULE__, [transaction_hash])
  end
end
@ -1,12 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportReceipt do
  @moduledoc "Imports transaction receipts by web3 conventions."

  alias Explorer.ReceiptImporter

  @dialyzer {:nowarn_function, perform: 1}
  # Synchronously imports the receipt for the transaction `hash`.
  def perform(hash), do: ReceiptImporter.import(hash)

  # Enqueues a receipt import on the "receipts" queue.
  def perform_later(hash) do
    Exq.enqueue(Exq.Enqueuer, "receipts", __MODULE__, [hash])
  end
end
@ -1,18 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportSkippedBlocks do
  @moduledoc "Imports skipped blocks."

  alias Explorer.SkippedBlocks
  alias Explorer.Workers.ImportBlock

  @doc "Synchronously enqueues imports for one skipped block."
  def perform, do: perform(1)

  @doc "Looks up `count` skipped block numbers and enqueues an import for each."
  def perform(count) do
    count
    |> SkippedBlocks.first()
    |> Enum.map(&ImportBlock.perform_later/1)
  end

  @doc "Enqueues this worker itself for one skipped block."
  def perform_later, do: perform_later(1)

  def perform_later(count) do
    Exq.enqueue(Exq.Enqueuer, "default", __MODULE__, [count])
  end
end
@ -1,26 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportTransaction do
  @moduledoc """
  Manages the lifecycle of importing a single Transaction from web3.
  """

  alias Explorer.TransactionImporter
  alias Explorer.Workers.{ImportInternalTransaction, ImportReceipt}

  # One attribute covers both perform/1 clauses (the original repeated it).
  @dialyzer {:nowarn_function, perform: 1}

  @doc """
  Imports the transaction (by hash string or raw web3 map), then queues
  its internal-transaction and receipt imports.
  """
  def perform(hash) when is_binary(hash) do
    TransactionImporter.import(hash)
    queue_followups(hash)
  end

  def perform(raw_transaction) when is_map(raw_transaction) do
    TransactionImporter.import(raw_transaction)
    queue_followups(raw_transaction["hash"])
  end

  @doc ~s(Enqueues a transaction import on the "transactions" queue.)
  def perform_later(hash) do
    Exq.enqueue(Exq.Enqueuer, "transactions", __MODULE__, [hash])
  end

  # Enqueues the dependent imports shared by both perform/1 clauses.
  defp queue_followups(hash) do
    ImportInternalTransaction.perform_later(hash)
    ImportReceipt.perform_later(hash)
  end
end
@ -1,29 +0,0 @@ |
||||
defmodule Explorer.Workers.RefreshBalance do
  @moduledoc """
  Refreshes the Credit and Debit balance views.
  """

  alias Ecto.Adapters.SQL
  alias Explorer.Chain.{Credit, Debit}
  alias Explorer.Repo

  @doc "Refreshes one materialized view unless a refresh is already running."
  def perform("credit"), do: unless(refreshing("credits"), do: Credit.refresh())
  def perform("debit"), do: unless(refreshing("debits"), do: Debit.refresh())

  @doc "Enqueues a refresh of both views."
  def perform do
    perform_later(["credit"])
    perform_later(["debit"])
  end

  def perform_later(args \\ []) do
    Exq.enqueue(Exq.Enqueuer, "default", __MODULE__, args)
  end

  @doc """
  Checks pg_stat_activity for an in-flight `REFRESH MATERIALIZED VIEW` of
  `table`, so concurrent refreshes are not stacked up.
  """
  def refreshing(table) do
    # Trailing % is the ILIKE wildcard matching the rest of the statement.
    query = "REFRESH MATERIALIZED VIEW CONCURRENTLY #{table}%"

    # BUG FIX: the original pattern was '$#{query}' — the literal leading `$`
    # can never match a running statement, so this check always returned false.
    result = SQL.query!(Repo, "SELECT TRUE FROM pg_stat_activity WHERE query ILIKE '#{query}'", [])

    Enum.count(result.rows) > 0
  end
end
@ -1,25 +0,0 @@ |
||||
defmodule Mix.Tasks.Exq.Start do
  @moduledoc "Starts the Exq worker"
  use Mix.Task

  alias Explorer.{Repo, Scheduler}

  @apps [:postgrex, :ecto, :ethereumex, :tzdata]

  # "scheduler" mode also boots the Quantum scheduler and only enqueues jobs.
  def run(["scheduler"]) do
    prepare()
    Exq.start_link(mode: :enqueuer)
    Scheduler.start_link()
    :timer.sleep(:infinity)
  end

  # Default mode runs the full Exq worker.
  def run(_) do
    prepare()
    Exq.start_link(mode: :default)
    :timer.sleep(:infinity)
  end

  # Shared boot: start required applications and the repo.
  defp prepare do
    Enum.each(@apps, &Application.ensure_all_started/1)
    Repo.start_link()
  end
end
@ -1,24 +0,0 @@ |
||||
defmodule Mix.Tasks.Scrape.Balances do
  @moduledoc "Populate Address balances."

  use Mix.Task

  alias Explorer.{BalanceImporter, Repo, SkippedBalances}

  def run([]), do: run(1)

  # Imports balances for up to `count` addresses missing one.
  def run(count) do
    boot()

    "#{count}"
    |> String.to_integer()
    |> SkippedBalances.fetch()
    |> Flow.from_enumerable()
    |> Flow.map(&BalanceImporter.import/1)
    |> Enum.to_list()
  end

  # Starts the applications and processes the task needs (mix tasks do not
  # boot the app automatically here).
  defp boot do
    Enum.each([:postgrex, :ecto, :ethereumex, :tzdata], &Application.ensure_all_started/1)
    Repo.start_link()
    Exq.start_link(mode: :enqueuer)
  end
end
@ -1,26 +0,0 @@ |
||||
defmodule Mix.Tasks.Scrape.Blocks do
  @moduledoc "Scrapes blocks from web3"

  use Mix.Task

  alias Explorer.{BlockImporter, Repo, SkippedBlocks}

  def run([]), do: run(1)

  # Downloads and imports up to `count` skipped blocks.
  def run(count) do
    boot()

    "#{count}"
    |> String.to_integer()
    |> SkippedBlocks.first()
    |> Enum.shuffle()
    |> Flow.from_enumerable()
    |> Flow.map(&BlockImporter.download_block/1)
    |> Flow.map(&BlockImporter.import/1)
    |> Enum.to_list()
  end

  # Starts the applications and processes the task needs.
  defp boot do
    Enum.each([:postgrex, :ecto, :ethereumex, :tzdata], &Application.ensure_all_started/1)
    Repo.start_link()
    Exq.start_link(mode: :enqueuer)
  end
end
@ -1,24 +0,0 @@ |
||||
defmodule Mix.Tasks.Scrape.InternalTransactions do
  @moduledoc "Backfill Internal Transactions via Parity Trace."

  use Mix.Task

  alias Explorer.{InternalTransactionImporter, Repo, SkippedInternalTransactions}

  def run([]), do: run(1)

  # Imports internal transactions for up to `count` transactions missing them.
  def run(count) do
    boot()

    "#{count}"
    |> String.to_integer()
    |> SkippedInternalTransactions.first()
    |> Enum.shuffle()
    |> Flow.from_enumerable()
    |> Flow.map(&InternalTransactionImporter.import/1)
    |> Enum.to_list()
  end

  # Starts the applications and the repo (no Exq needed for this task).
  defp boot do
    Enum.each([:postgrex, :ecto, :ethereumex, :tzdata], &Application.ensure_all_started/1)
    Repo.start_link()
  end
end
@ -1,24 +0,0 @@ |
||||
defmodule Mix.Tasks.Scrape.Receipts do
  @moduledoc "Scrapes transaction receipts from web3"

  use Mix.Task

  alias Explorer.{ReceiptImporter, Repo, SkippedReceipts}

  # No argument: process a single skipped receipt.
  def run([]), do: run(1)

  # Imports receipts for up to `count` transactions that are missing one.
  def run(count) do
    # Mix tasks do not boot the app here; start what we need explicitly.
    [:postgrex, :ecto, :ethereumex, :tzdata]
    |> Enum.each(&Application.ensure_all_started/1)

    Repo.start_link()

    "#{count}"
    |> String.to_integer()
    |> SkippedReceipts.first()
    |> Enum.shuffle()
    |> Flow.from_enumerable()
    |> Flow.map(&ReceiptImporter.import/1)
    |> Enum.to_list()
  end
end
@ -1,14 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateTransactions do
  use Ecto.Migration

  # Original transactions table: just a hash plus a required owning block.
  def change do
    create table(:transactions) do
      add :hash, :string, null: false
      add :block_id, references(:blocks), null: false
      timestamps null: false
    end

    # Case-insensitive uniqueness on the hash via a lower() expression index.
    create unique_index(:transactions, ["(lower(hash))"], name: :transactions_hash_index)
    create index(:transactions, [:block_id])
  end
end
@ -0,0 +1,37 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateTransactions do
  use Ecto.Migration

  # Rebuilt transactions table carrying the full set of web3 transaction
  # attributes plus direct from/to address foreign keys.
  def change do
    create table(:transactions) do
      # Fields
      add :gas, :numeric, precision: 100, null: false
      add :gas_price, :numeric, precision: 100, null: false
      add :hash, :string, null: false
      add :input, :text, null: false
      add :nonce, :integer, null: false
      add :public_key, :string, null: false
      add :r, :string, null: false
      add :s, :string, null: false
      add :standard_v, :string, null: false
      add :transaction_index, :string, null: false
      add :v, :string, null: false
      add :value, :numeric, precision: 100, null: false

      timestamps null: false

      # Foreign Keys

      # null when a pending transaction
      add :block_id, references(:blocks, on_delete: :delete_all), null: true
      add :from_address_id, references(:addresses, on_delete: :delete_all)
      add :to_address_id, references(:addresses, on_delete: :delete_all)
    end

    create index(:transactions, :block_id)
    create index(:transactions, :from_address_id)
    # Exact-match uniqueness on hash (unlike the old lower(hash) index).
    create unique_index(:transactions, [:hash])
    create index(:transactions, :inserted_at)
    create index(:transactions, :to_address_id)
    create index(:transactions, :updated_at)
  end
end
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddValueToTransactions do
  use Ecto.Migration

  # Adds the wei value transferred by the transaction.
  # NOTE(review): NOT NULL with no default fails if rows already exist —
  # presumably this ran before any data was loaded; confirm.
  def change do
    alter table(:transactions) do
      add :value, :numeric, precision: 100, null: false
    end
  end
end
@ -1,18 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddFieldsToTransactions do
  use Ecto.Migration

  # Adds the remaining web3 transaction attributes (gas, signature parts, etc.).
  # NOTE(review): all columns are NOT NULL with no default — fails on a
  # populated table; presumably ran before production data existed.
  def change do
    alter table(:transactions) do
      add :gas, :numeric, precision: 100, null: false
      add :gas_price, :numeric, precision: 100, null: false
      add :input, :text, null: false
      add :nonce, :integer, null: false
      add :public_key, :string, null: false
      add :r, :string, null: false
      add :s, :string, null: false
      add :standard_v, :string, null: false
      add :transaction_index, :string, null: false
      add :v, :string, null: false
    end
  end
end
@ -1,14 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateFromAddresses do
  use Ecto.Migration

  # Join table linking each transaction to its sending address.
  def change do
    create table(:from_addresses, primary_key: false) do
      add :transaction_id, references(:transactions), null: false, primary_key: true
      add :address_id, references(:addresses), null: false
      timestamps null: false
    end

    # unique_index/2 expands to index(..., unique: true) with the same name.
    create unique_index(:from_addresses, :transaction_id)
    create index(:from_addresses, :address_id)
  end
end
@ -1,14 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateToAddresses do
  use Ecto.Migration

  # Join table linking each transaction to its receiving address.
  def change do
    create table(:to_addresses, primary_key: false) do
      add :transaction_id, references(:transactions), null: false, primary_key: true
      add :address_id, references(:addresses), null: false
      timestamps null: false
    end

    # unique_index/2 expands to index(..., unique: true) with the same name.
    create unique_index(:to_addresses, :transaction_id)
    create index(:to_addresses, :address_id)
  end
end
@ -1,14 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateBlockTransactions do
  use Ecto.Migration

  # Join table connecting blocks to transactions; a transaction belongs to
  # at most one block (transaction_id is the primary key and unique).
  def change do
    create table(:block_transactions, primary_key: false) do
      add :block_id, references(:blocks)
      add :transaction_id, references(:transactions), primary_key: true
      timestamps null: false
    end

    create unique_index(:block_transactions, :transaction_id)
    create unique_index(:block_transactions, [:block_id, :transaction_id])
  end
end
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.RemoveBlockIdFromTransactions do
  use Ecto.Migration

  # Drops transactions.block_id now that block membership lives in the
  # block_transactions join table.
  # NOTE(review): `remove/1` inside `change/0` is not reversible without the
  # column type — confirm rollback of this migration is never required.
  def change do
    alter table(:transactions) do
      remove :block_id
    end
  end
end
@ -1,8 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddIndicesToBlockAndBlockTransaction do
  use Ecto.Migration

  # Indexes for block-membership lookups and time-ordered block queries.
  def change do
    create index(:block_transactions, :block_id)
    create index(:blocks, :timestamp)
  end
end
@ -1,8 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddTransactionsIndexToTimestamps do
  use Ecto.Migration

  # Supports recency-ordered transaction listings.
  def change do
    create index(:transactions, :inserted_at)
    create index(:transactions, :updated_at)
  end
end
@ -1,48 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateBalancesViews do
  use Ecto.Migration

  # Materialized views aggregating incoming (credits) and outgoing (debits)
  # value per address, counting only transactions with a successful receipt.
  def up do
    execute """
    CREATE MATERIALIZED VIEW credits AS
      SELECT addresses.id AS address_id,
        COALESCE(SUM(transactions.value), 0) AS value,
        COUNT(to_addresses.address_id) AS count,
        COALESCE(MIN(transactions.inserted_at), NOW()) AS inserted_at,
        COALESCE(MAX(transactions.inserted_at), NOW()) AS updated_at
      FROM addresses
      INNER JOIN to_addresses ON to_addresses.address_id = addresses.id
      INNER JOIN transactions ON transactions.id = to_addresses.transaction_id
      INNER JOIN receipts ON receipts.transaction_id = transactions.id AND receipts.status = 1
      GROUP BY addresses.id
    ;
    """

    execute """
    CREATE MATERIALIZED VIEW debits AS
      SELECT addresses.id AS address_id,
        COALESCE(SUM(transactions.value), 0) AS value,
        COUNT(from_addresses.address_id) AS count,
        COALESCE(MIN(transactions.inserted_at), NOW()) AS inserted_at,
        COALESCE(MAX(transactions.inserted_at), NOW()) AS updated_at
      FROM addresses
      INNER JOIN from_addresses ON from_addresses.address_id = addresses.id
      INNER JOIN transactions ON transactions.id = from_addresses.transaction_id
      INNER JOIN receipts ON receipts.transaction_id = transactions.id AND receipts.status = 1
      GROUP BY addresses.id
    ;
    """

    # The unique address_id indexes enable REFRESH ... CONCURRENTLY.
    create unique_index(:credits, :address_id)
    create index(:credits, :inserted_at)
    create index(:credits, :updated_at)

    create unique_index(:debits, :address_id)
    create index(:debits, :inserted_at)
    create index(:debits, :updated_at)
  end

  # Dropping a materialized view also drops its indexes.
  def down do
    execute "DROP MATERIALIZED VIEW credits;"
    execute "DROP MATERIALIZED VIEW debits;"
  end
end
@ -1,12 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.MoveAddressKeysToTransactions do
  use Ecto.Migration

  # Moves from/to address references onto transactions directly
  # (previously only reachable through the from_addresses/to_addresses
  # join tables).
  def change do
    alter table(:transactions) do
      add :to_address_id, references(:addresses)
      add :from_address_id, references(:addresses)
    end
  end
end
@ -1,8 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.IndexTransactionAddressIds do
  use Ecto.Migration

  # Supports per-address transaction lookups in both directions.
  def change do
    create index(:transactions, :to_address_id)
    create index(:transactions, :from_address_id)
  end
end
@ -1,38 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.DedupInternalTransactions do
  use Ecto.Migration

  # Rebuilds internal_transactions keeping one row per {transaction_id, index}:
  # SELECT DISTINCT ON into a scratch table, swap it in, then restore the
  # sequence, constraints, and indexes that SELECT INTO does not copy.
  def up do
    execute "SELECT DISTINCT ON (transaction_id, index) * INTO internal_transactions_dedup FROM internal_transactions;"
    execute "DROP TABLE internal_transactions;"
    execute "ALTER TABLE internal_transactions_dedup RENAME TO internal_transactions;"
    execute "CREATE SEQUENCE internal_transactions_id_seq OWNED BY internal_transactions.id;"
    execute """
    ALTER TABLE internal_transactions
    ALTER COLUMN id SET DEFAULT nextval('internal_transactions_id_seq'),
    ALTER COLUMN id SET NOT NULL,
    ALTER COLUMN transaction_id SET NOT NULL,
    ALTER COLUMN to_address_id SET NOT NULL,
    ALTER COLUMN from_address_id SET NOT NULL,
    ALTER COLUMN index SET NOT NULL,
    ALTER COLUMN call_type SET NOT NULL,
    ALTER COLUMN trace_address SET NOT NULL,
    ALTER COLUMN value SET NOT NULL,
    ALTER COLUMN gas SET NOT NULL,
    ALTER COLUMN gas_used SET NOT NULL,
    ALTER COLUMN inserted_at SET NOT NULL,
    ALTER COLUMN updated_at SET NOT NULL,
    ADD FOREIGN KEY (from_address_id) REFERENCES addresses(id),
    ADD FOREIGN KEY (to_address_id) REFERENCES addresses(id),
    ADD FOREIGN KEY (transaction_id) REFERENCES transactions(id);
    """
    execute "ALTER TABLE internal_transactions ADD PRIMARY KEY (id);"
    execute "CREATE INDEX internal_transactions_from_address_id_index ON internal_transactions (from_address_id);"
    execute "CREATE INDEX internal_transactions_to_address_id_index ON internal_transactions (to_address_id);"
    execute "CREATE INDEX internal_transactions_transaction_id_index ON internal_transactions (transaction_id);"
    execute "CREATE UNIQUE INDEX internal_transactions_transaction_id_index_index ON internal_transactions (transaction_id, index);"
  end

  # Intentionally asymmetric: the dedup itself cannot be undone, so rollback
  # only removes the uniqueness guarantee added above.
  def down do
    execute "DROP INDEX internal_transactions_transaction_id_index_index"
  end
end
@ -1,10 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddBalanceAndBalanceUpdatedAtToAddress do
  use Ecto.Migration

  # Caches the last fetched wei balance and when it was fetched.
  # Nullable: addresses start with no balance loaded.
  def change do
    alter table(:addresses) do
      add :balance, :numeric, precision: 100
      add :balance_updated_at, :utc_datetime
    end
  end
end
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Repo.Migrations.AddReceiptIdToTransactions do
  use Ecto.Migration

  # Plain bigint (no FK constraint) pointing at the transaction's receipt.
  # NOTE(review): no foreign key or index is created here — confirm intended.
  def change do
    alter table("transactions") do
      add :receipt_id, :bigint
    end
  end
end
@ -1,18 +0,0 @@ |
||||
defmodule Explorer.Chain.BlockTransactionTest do
  use Explorer.DataCase

  alias Explorer.Chain.BlockTransaction

  describe "changeset/2" do
    test "with empty attributes" do
      # block_id and transaction_id are required, so a blank changeset fails.
      changeset = BlockTransaction.changeset(%BlockTransaction{}, %{})

      refute changeset.valid?
    end

    test "with valid attributes" do
      changeset = BlockTransaction.changeset(%BlockTransaction{}, %{block_id: 4, transaction_id: 3})

      assert changeset.valid?
    end
  end
end
@ -1,13 +0,0 @@ |
||||
defmodule Explorer.Chain.FromAddressTest do
  use Explorer.DataCase

  alias Explorer.Chain.FromAddress

  describe "changeset/2" do
    test "with valid attributes" do
      # Factory attributes produce a valid transaction/address pairing.
      attrs = params_for(:from_address)

      changeset = FromAddress.changeset(%FromAddress{}, attrs)

      assert changeset.valid?
    end
  end
end
@ -1,13 +0,0 @@ |
||||
defmodule Explorer.Chain.ToAddressTest do
  use Explorer.DataCase

  alias Explorer.Chain.ToAddress

  describe "changeset/2" do
    test "with valid attributes" do
      # Factory attributes produce a valid transaction/address pairing.
      attrs = params_for(:to_address)

      changeset = ToAddress.changeset(%ToAddress{}, attrs)

      assert changeset.valid?
    end
  end
end
@ -1,14 +0,0 @@ |
||||
defmodule Explorer.EthereumexExtensionsTest do
  use Explorer.DataCase
  alias Explorer.EthereumexExtensions

  describe "trace_transaction/1" do
    # Replays a recorded RPC response from the named cassette fixture.
    test "returns a transaction trace" do
      use_cassette "ethereumex_extensions_trace_transaction_1" do
        hash = "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
        result = EthereumexExtensions.trace_transaction(hash)
        assert(is_list(result["trace"]))
      end
    end
  end
end
@ -1,40 +0,0 @@ |
||||
defmodule Explorer.BalanceImporterTest do
  use Explorer.DataCase

  alias Explorer.{Chain, BalanceImporter}
  alias Explorer.Chain.Address

  describe "import/1" do
    test "it updates the balance for an address" do
      insert(:address, hash: "0x5cc18cc34175d358ff8e19b7f98566263c4106a0", balance: 5)

      BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")

      # Value comes from the recorded RPC response for this address.
      expected_balance = Decimal.new(1_572_374_181_095_000_000)

      assert {:ok, %Address{balance: ^expected_balance}} =
               Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
    end

    test "it updates the balance update time for an address" do
      insert(
        :address,
        hash: "0x5cc18cc34175d358ff8e19b7f98566263c4106a0",
        balance_updated_at: nil
      )

      BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")

      assert {:ok, %Address{balance_updated_at: balance_updated_at}} =
               Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")

      # The import stamps the fetch time, so it must no longer be nil.
      refute is_nil(balance_updated_at)
    end

    test "it creates an address if one does not exist" do
      BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")

      assert {:ok, _} = Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
    end
  end
end
@ -1,121 +0,0 @@ |
||||
defmodule Explorer.BlockImporterTest do
  use Explorer.DataCase

  import Mock

  alias Explorer.BlockImporter
  alias Explorer.Chain.{Block, Transaction}
  alias Explorer.Workers.ImportTransaction

  describe "import/1" do
    test "imports and saves a block to the database" do
      # Cassette supplies the RPC response; transaction imports are mocked out.
      use_cassette "block_importer_import_1_saves_the_block" do
        with_mock ImportTransaction, perform: fn _ -> {:ok} end do
          BlockImporter.import("0xc4f0d")
          block = Block |> order_by(desc: :inserted_at) |> Repo.one()

          assert block.hash == "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e"
        end
      end
    end

    test "when a block with the same hash is imported it does not update the block" do
      use_cassette "block_importer_import_1_duplicate_block" do
        with_mock ImportTransaction, perform: fn hash -> insert(:transaction, hash: hash) end do
          # Pre-insert the block with a sentinel gas_limit to detect updates.
          insert(
            :block,
            hash: "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e",
            gas_limit: 5
          )

          BlockImporter.import("0xc4f0d")

          block =
            Repo.get_by(
              Block,
              hash: "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e"
            )

          # Sentinel survived and no duplicate row was created.
          assert block.gas_limit == 5
          assert Block |> Repo.all() |> Enum.count() == 1
        end
      end
    end
  end

  describe "import/1 pending" do
    test "does not create a block" do
      use_cassette "block_importer_import_1_pending" do
        with_mock ImportTransaction, perform_later: fn _ -> {:ok} end do
          BlockImporter.import("pending")
          assert Block |> Repo.all() |> Enum.count() == 0
        end
      end
    end

    test "when a block with the same hash is imported does not create a block" do
      use_cassette "block_importer_import_1_pending" do
        with_mock ImportTransaction, perform_later: fn _ -> insert(:transaction) end do
          BlockImporter.import("pending")
          # Pending import still records the block's transactions.
          assert Transaction |> Repo.all() |> Enum.count() != 0
        end
      end
    end
  end

  describe "find/1" do
    test "returns an empty block when there is no block with the given hash" do
      assert BlockImporter.find("0xC001") == %Block{}
    end

    test "returns the block with the requested hash" do
      block = insert(:block, hash: "0xBEA75")
      assert BlockImporter.find("0xBEA75").id == block.id
    end
  end

  describe "download_block/1" do
    test "downloads the block" do
      use_cassette "block_importer_download_block_1_downloads_the_block" do
        raw_block = BlockImporter.download_block("0xc4f0d")
        assert raw_block
      end
    end
  end

  describe "extract_block/1" do
    # Verifies hex quantities are decoded and keys are translated to snake_case.
    test "extracts the block attributes" do
      extracted_block =
        BlockImporter.extract_block(%{
          "difficulty" => "0xfffffffffffffffffffffffffffffffe",
          "gasLimit" => "0x02",
          "gasUsed" => "0x19522",
          "hash" => "bananas",
          "miner" => "0xdb1207770e0a4258d7a4ce49ab037f92564fea85",
          "number" => "0x7f2fb",
          "parentHash" => "0x70029f66ea5a3b2b1ede95079d95a2ab74b649b5b17cdcf6f29b6317e7c7efa6",
          "size" => "0x10",
          "timestamp" => "0x12",
          "totalDifficulty" => "0xff",
          "nonce" => "0xfb6e1a62d119228b",
          "transactions" => []
        })

      assert(
        extracted_block == %{
          difficulty: 340_282_366_920_938_463_463_374_607_431_768_211_454,
          gas_limit: 2,
          gas_used: 103_714,
          hash: "bananas",
          nonce: "0xfb6e1a62d119228b",
          miner: "0xdb1207770e0a4258d7a4ce49ab037f92564fea85",
          number: 520_955,
          parent_hash: "0x70029f66ea5a3b2b1ede95079d95a2ab74b649b5b17cdcf6f29b6317e7c7efa6",
          size: 16,
          timestamp: Timex.parse!("1970-01-01T00:00:18-00:00", "{ISO:Extended}"),
          total_difficulty: 255
        }
      )
    end
  end
end
@ -1,115 +0,0 @@ |
||||
defmodule Explorer.InternalTransactionImporterTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.Chain.InternalTransaction |
||||
alias Explorer.InternalTransactionImporter |
||||
|
||||
describe "import/1" do |
||||
test "imports and saves an internal transaction to the database" do |
||||
use_cassette "internal_transaction_importer_import_1" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68" |
||||
) |
||||
|
||||
InternalTransactionImporter.import(transaction.hash) |
||||
internal_transactions = InternalTransaction |> Repo.all() |
||||
assert length(internal_transactions) == 2 |
||||
end |
||||
end |
||||
|
||||
test "imports internal transactions with ordered indexes" do |
||||
use_cassette "internal_transaction_importer_import_1" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68" |
||||
) |
||||
|
||||
InternalTransactionImporter.import(transaction.hash) |
||||
|
||||
last_internal_transaction = InternalTransaction |> order_by(desc: :index) |> limit(1) |> Repo.one() |
||||
|
||||
assert last_internal_transaction.index == 1 |
||||
end |
||||
end |
||||
|
||||
test "imports an internal transaction that creates a contract" do |
||||
use_cassette "internal_transaction_importer_import_1_with_contract_creation" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0x27d64b8e8564d2852c88767e967b88405c99341509cd3a3504fd67a65277116d" |
||||
) |
||||
|
||||
InternalTransactionImporter.import(transaction.hash) |
||||
|
||||
last_internal_transaction = InternalTransaction |> order_by(desc: :index) |> limit(1) |> Repo.one() |
||||
|
||||
assert last_internal_transaction.call_type == "create" |
||||
end |
||||
end |
||||
|
||||
test "subsequent imports do not create duplicate internal transactions" do |
||||
use_cassette "internal_transaction_importer_import_1" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68" |
||||
) |
||||
|
||||
InternalTransactionImporter.import(transaction.hash) |
||||
InternalTransactionImporter.import(transaction.hash) |
||||
|
||||
internal_transactions = InternalTransaction |> Repo.all() |
||||
assert length(internal_transactions) == 2 |
||||
end |
||||
end |
||||
|
||||
test "import fails if a transaction with the hash doesn't exist" do |
||||
hash = "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68" |
||||
assert_raise Ecto.NoResultsError, fn -> InternalTransactionImporter.import(hash) end |
||||
end |
||||
end |
||||
|
||||
describe "extract_trace" do |
||||
test "maps attributes to database record attributes when the trace is a call" do |
||||
trace = %{ |
||||
"action" => %{ |
||||
"callType" => "call", |
||||
"from" => "0xba9f067abbc4315ece8eb33e7a3d01030bb368ef", |
||||
"gas" => "0x4821f", |
||||
"input" => "0xd1f276d3", |
||||
"to" => "0xe213402e637565bb9de0651827517e7554693f53", |
||||
"value" => "0x0" |
||||
}, |
||||
"result" => %{ |
||||
"gasUsed" => "0x4e4", |
||||
"output" => "0x000000000000000000000000ba9f067abbc4315ece8eb33e7a3d01030bb368ef" |
||||
}, |
||||
"subtraces" => 0, |
||||
"traceAddress" => [2, 0], |
||||
"type" => "call" |
||||
} |
||||
|
||||
to_address = insert(:address, hash: "0xe213402e637565bb9de0651827517e7554693f53") |
||||
from_address = insert(:address, hash: "0xba9f067abbc4315ece8eb33e7a3d01030bb368ef") |
||||
|
||||
assert( |
||||
InternalTransactionImporter.extract_trace({trace, 2}) == %{ |
||||
index: 2, |
||||
to_address_id: to_address.id, |
||||
from_address_id: from_address.id, |
||||
call_type: "call", |
||||
trace_address: [2, 0], |
||||
value: 0, |
||||
gas: 295_455, |
||||
gas_used: 1252, |
||||
input: "0xd1f276d3", |
||||
output: "0x000000000000000000000000ba9f067abbc4315ece8eb33e7a3d01030bb368ef" |
||||
} |
||||
) |
||||
end |
||||
end |
||||
end |
@ -1,119 +0,0 @@ |
||||
defmodule Explorer.ReceiptImporterTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.Chain.{Log, Receipt} |
||||
alias Explorer.ReceiptImporter |
||||
|
||||
describe "import/1" do |
||||
test "saves a receipt to the database" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291" |
||||
) |
||||
|
||||
use_cassette "transaction_importer_import_1_receipt" do |
||||
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
receipt = Receipt |> preload([:transaction]) |> Repo.one() |
||||
assert receipt.transaction == transaction |
||||
end |
||||
end |
||||
|
||||
test "saves a receipt log" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291" |
||||
) |
||||
|
||||
use_cassette "transaction_importer_import_1_receipt" do |
||||
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
receipt = Receipt |> preload([:transaction]) |> Repo.one() |
||||
log = Log |> preload(receipt: :transaction) |> Repo.one() |
||||
assert log.receipt == receipt |
||||
end |
||||
end |
||||
|
||||
test "saves a receipt log for an address" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291" |
||||
) |
||||
|
||||
address = insert(:address, hash: "0x353fe3ffbf77edef7f9c352c47965a38c07e837c") |
||||
|
||||
use_cassette "transaction_importer_import_1_receipt" do |
||||
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
log = Log |> preload([:address]) |> Repo.one() |
||||
assert log.address == address |
||||
end |
||||
end |
||||
|
||||
test "saves a receipt for a failed transaction" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0x2532864dc2e0d0bc2dfabf4685c0c03dbdbe9cf67ebc593fc82d41087ab71435" |
||||
) |
||||
|
||||
use_cassette "transaction_importer_import_1_failed" do |
||||
ReceiptImporter.import("0x2532864dc2e0d0bc2dfabf4685c0c03dbdbe9cf67ebc593fc82d41087ab71435") |
||||
|
||||
receipt = Repo.one(Receipt) |
||||
assert receipt.status == 0 |
||||
end |
||||
end |
||||
|
||||
test "saves a receipt for a transaction that ran out of gas" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0x702e518267b0a57e4cb44b9db100afe4d7115f2d2650466a8c376f3dbb77eb35" |
||||
) |
||||
|
||||
use_cassette "transaction_importer_import_1_out_of_gas" do |
||||
ReceiptImporter.import("0x702e518267b0a57e4cb44b9db100afe4d7115f2d2650466a8c376f3dbb77eb35") |
||||
|
||||
receipt = Repo.one(Receipt) |
||||
assert receipt.status == 0 |
||||
end |
||||
end |
||||
|
||||
test "does not import a receipt for a transaction that already has one" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291" |
||||
) |
||||
|
||||
insert(:receipt, transaction: transaction) |
||||
|
||||
use_cassette "transaction_importer_import_1_receipt" do |
||||
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
assert Repo.all(Receipt) |> Enum.count() == 1 |
||||
end |
||||
end |
||||
|
||||
test "does not import a receipt for a nonexistent transaction" do |
||||
use_cassette "transaction_importer_import_1_receipt" do |
||||
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
assert Repo.all(Receipt) |> Enum.count() == 0 |
||||
end |
||||
end |
||||
|
||||
test "does not process a forever-pending receipt" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0xde791cfcde3900d4771e5fcf8c11dc305714118df7aa7e42f84576e64dbf6246" |
||||
) |
||||
|
||||
use_cassette "transaction_importer_import_1_pending" do |
||||
ReceiptImporter.import("0xde791cfcde3900d4771e5fcf8c11dc305714118df7aa7e42f84576e64dbf6246") |
||||
|
||||
assert Repo.all(Receipt) |> Enum.count() == 0 |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,270 +0,0 @@ |
||||
defmodule Explorer.TransactionImporterTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.Chain.{Address, BlockTransaction, Transaction} |
||||
alias Explorer.TransactionImporter |
||||
|
||||
@raw_transaction %{ |
||||
"creates" => nil, |
||||
"hash" => "pepino", |
||||
"value" => "0xde0b6b3a7640000", |
||||
"from" => "0x34d0ef2c", |
||||
"gas" => "0x21000", |
||||
"gasPrice" => "0x10000", |
||||
"input" => "0x5c8eff12", |
||||
"nonce" => "0x31337", |
||||
"publicKey" => "0xb39af9c", |
||||
"r" => "0x9", |
||||
"s" => "0x10", |
||||
"to" => "0x7a33b7d", |
||||
"standardV" => "0x11", |
||||
"transactionIndex" => "0x12", |
||||
"v" => "0x13" |
||||
} |
||||
|
||||
@processed_transaction %{ |
||||
hash: "pepino", |
||||
value: 1_000_000_000_000_000_000, |
||||
gas: 135_168, |
||||
gas_price: 65536, |
||||
input: "0x5c8eff12", |
||||
nonce: 201_527, |
||||
public_key: "0xb39af9c", |
||||
r: "0x9", |
||||
s: "0x10", |
||||
standard_v: "0x11", |
||||
transaction_index: "0x12", |
||||
v: "0x13" |
||||
} |
||||
|
||||
describe "import/1" do |
||||
test "imports and saves a transaction to the database" do |
||||
use_cassette "transaction_importer_import_saves_the_transaction" do |
||||
TransactionImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291") |
||||
|
||||
transaction = Transaction |> order_by(desc: :inserted_at) |> Repo.one() |
||||
|
||||
assert transaction.hash == "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291" |
||||
end |
||||
end |
||||
|
||||
test "when the transaction has previously been saved does not update it" do |
||||
use_cassette "transaction_importer_updates_the_association" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23", |
||||
gas: 5 |
||||
) |
||||
|
||||
TransactionImporter.import("0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23") |
||||
|
||||
transaction = Transaction |> order_by(desc: :inserted_at) |> Repo.one() |
||||
|
||||
assert transaction.gas == Decimal.new(5) |
||||
end |
||||
end |
||||
|
||||
test "binds an association to an existing block" do |
||||
use_cassette "transaction_importer_saves_the_association" do |
||||
block = |
||||
insert( |
||||
:block, |
||||
hash: "0xfce13392435a8e7dab44c07d482212efb9dc39a9bea1915a9ead308b55a617f9" |
||||
) |
||||
|
||||
TransactionImporter.import("0x64d851139325479c3bb7ccc6e6ab4cde5bc927dce6810190fe5d770a4c1ac333") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0x64d851139325479c3bb7ccc6e6ab4cde5bc927dce6810190fe5d770a4c1ac333") |
||||
|
||||
block_transaction = BlockTransaction |> Repo.get_by(transaction_id: transaction.id) |
||||
|
||||
assert block_transaction.block_id == block.id |
||||
end |
||||
end |
||||
|
||||
test "when there is no block it does not save a block transaction" do |
||||
use_cassette "transaction_importer_txn_without_block" do |
||||
TransactionImporter.import("0xc6aa189827c14880f012a65292f7add7b5310094f8773a3d85b66303039b9dcf") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xc6aa189827c14880f012a65292f7add7b5310094f8773a3d85b66303039b9dcf") |
||||
|
||||
block_transaction = BlockTransaction |> Repo.get_by(transaction_id: transaction.id) |
||||
|
||||
refute block_transaction |
||||
end |
||||
end |
||||
|
||||
test "creates a from address" do |
||||
use_cassette "transaction_importer_creates_a_from_address" do |
||||
TransactionImporter.import("0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d") |
||||
|
||||
address = Address |> Repo.get_by(hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785") |
||||
|
||||
assert transaction.from_address_id == address.id |
||||
end |
||||
end |
||||
|
||||
test "binds an existing from address" do |
||||
insert(:address, hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785") |
||||
|
||||
use_cassette "transaction_importer_creates_a_from_address" do |
||||
TransactionImporter.import("0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d") |
||||
|
||||
address = Address |> Repo.get_by(hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785") |
||||
|
||||
assert transaction.from_address_id == address.id |
||||
end |
||||
end |
||||
|
||||
test "creates a to address" do |
||||
use_cassette "transaction_importer_creates_a_to_address" do |
||||
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8") |
||||
|
||||
assert transaction.to_address_id == address.id |
||||
end |
||||
end |
||||
|
||||
test "binds an existing to address" do |
||||
insert(:address, hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8") |
||||
|
||||
use_cassette "transaction_importer_creates_a_to_address" do |
||||
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8") |
||||
|
||||
assert(transaction.to_address_id == address.id) |
||||
end |
||||
end |
||||
|
||||
test "creates a to address using creates when to is nil" do |
||||
use_cassette "transaction_importer_creates_a_to_address_from_creates" do |
||||
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
transaction = |
||||
Transaction |
||||
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8") |
||||
|
||||
assert(transaction.to_address_id == address.id) |
||||
end |
||||
end |
||||
|
||||
test "processes a map of transaction attributes" do |
||||
insert(:block, hash: "0xtakis") |
||||
|
||||
TransactionImporter.import(Map.merge(@raw_transaction, %{"hash" => "0xmunchos", "blockHash" => "0xtakis"})) |
||||
|
||||
last_transaction = Transaction |> order_by(desc: :inserted_at) |> limit(1) |> Repo.one() |
||||
|
||||
assert last_transaction.hash == "0xmunchos" |
||||
end |
||||
|
||||
test "gets balances for addresses" do |
||||
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c") |
||||
|
||||
from_address = Address |> Repo.get_by(hash: "0xb2867180771b196518651c174c9240d5e8bd0ecd") |
||||
to_address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8") |
||||
|
||||
assert(from_address.balance == Decimal.new(1_572_374_181_095_000_000)) |
||||
assert(to_address.balance == Decimal.new(1_572_374_181_095_000_000)) |
||||
end |
||||
end |
||||
|
||||
describe "find/1" do |
||||
test "returns an empty transaction when there is no transaction with the given hash" do |
||||
assert TransactionImporter.find("0xC001") == %Transaction{} |
||||
end |
||||
|
||||
test "returns the transaction with the requested hash" do |
||||
transaction = insert(:transaction, hash: "0xBEA75") |
||||
assert TransactionImporter.find("0xBEA75").id == transaction.id |
||||
end |
||||
end |
||||
|
||||
describe "download_transaction/1" do |
||||
test "downloads a transaction" do |
||||
use_cassette "transaction_importer_download_transaction" do |
||||
raw_transaction = |
||||
TransactionImporter.download_transaction("0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23") |
||||
|
||||
assert(raw_transaction["from"] == "0xbe96ef1d056c97323e210fd0dd818aa027e57143") |
||||
end |
||||
end |
||||
|
||||
test "when it has an invalid hash" do |
||||
use_cassette "transaction_importer_download_transaction_with_a_bad_hash" do |
||||
assert_raise MatchError, fn -> |
||||
TransactionImporter.download_transaction("0xdecafisbadzzzz") |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
describe "extract_attrs/1" do |
||||
test "returns a changeset-friendly list of transaction attributes" do |
||||
transaction_attrs = TransactionImporter.extract_attrs(@raw_transaction) |
||||
assert transaction_attrs == @processed_transaction |
||||
end |
||||
end |
||||
|
||||
describe "create_block_transaction/2" do |
||||
test "inserts a block transaction" do |
||||
block = insert(:block) |
||||
transaction = insert(:transaction) |
||||
TransactionImporter.create_block_transaction(transaction, block.hash) |
||||
|
||||
block_transaction = |
||||
BlockTransaction |
||||
|> Repo.get_by(transaction_id: transaction.id, block_id: block.id) |
||||
|
||||
assert block_transaction |
||||
end |
||||
|
||||
test "updates an already existing block transaction" do |
||||
block = insert(:block) |
||||
transaction = insert(:transaction) |
||||
the_seventies = Timex.parse!("1970-01-01T00:00:18-00:00", "{ISO:Extended}") |
||||
|
||||
block_transaction = |
||||
insert(:block_transaction, %{ |
||||
block_id: block.id, |
||||
transaction_id: transaction.id, |
||||
inserted_at: the_seventies, |
||||
updated_at: the_seventies |
||||
}) |
||||
|
||||
update_block = insert(:block) |
||||
TransactionImporter.create_block_transaction(transaction, update_block.hash) |
||||
|
||||
updated_block_transaction = |
||||
BlockTransaction |
||||
|> Repo.get_by(transaction_id: transaction.id) |
||||
|
||||
refute block_transaction.block_id == updated_block_transaction.block_id |
||||
refute block_transaction.updated_at == updated_block_transaction.updated_at |
||||
end |
||||
end |
||||
end |
@ -0,0 +1,93 @@ |
||||
defmodule Explorer.Indexer.SequenceTest do |
||||
use ExUnit.Case |
||||
|
||||
alias Explorer.Indexer.Sequence |
||||
|
||||
test "start_link" do |
||||
{:ok, pid} = Sequence.start_link([{1, 4}], 5, 1) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 5, |
||||
mode: :infinite, |
||||
queue: {[{1, 4}], []}, |
||||
step: 1 |
||||
} |
||||
end |
||||
|
||||
test "inject_range" do |
||||
{:ok, pid} = Sequence.start_link([{1, 2}], 5, 1) |
||||
|
||||
assert :ok = Sequence.inject_range(pid, {3, 4}) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 5, |
||||
mode: :infinite, |
||||
queue: {[{3, 4}], [{1, 2}]}, |
||||
step: 1 |
||||
} |
||||
end |
||||
|
||||
test "cap" do |
||||
{:ok, pid} = Sequence.start_link([{1, 2}], 5, 1) |
||||
|
||||
assert :ok = Sequence.cap(pid) |
||||
assert state(pid).mode == :finite |
||||
end |
||||
|
||||
describe "pop" do |
||||
test "with a non-empty queue in finite and infinite modes" do |
||||
{:ok, pid} = Sequence.start_link([{1, 4}, {6, 9}], 99, 5) |
||||
|
||||
assert {1, 4} == Sequence.pop(pid) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 99, |
||||
mode: :infinite, |
||||
queue: {[], [{6, 9}]}, |
||||
step: 5 |
||||
} |
||||
|
||||
:ok = Sequence.cap(pid) |
||||
|
||||
assert {6, 9} == Sequence.pop(pid) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 99, |
||||
mode: :finite, |
||||
queue: {[], []}, |
||||
step: 5 |
||||
} |
||||
end |
||||
|
||||
test "with an empty queue in infinite mode" do |
||||
{:ok, pid} = Sequence.start_link([], 5, 5) |
||||
|
||||
assert {5, 9} == Sequence.pop(pid) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 10, |
||||
mode: :infinite, |
||||
queue: {[], []}, |
||||
step: 5 |
||||
} |
||||
end |
||||
|
||||
test "with an empty queue in finite mode" do |
||||
{:ok, pid} = Sequence.start_link([], 5, 5) |
||||
:ok = Sequence.cap(pid) |
||||
|
||||
assert :halt == Sequence.pop(pid) |
||||
|
||||
assert state(pid) == %Sequence{ |
||||
current: 5, |
||||
mode: :finite, |
||||
queue: {[], []}, |
||||
step: 5 |
||||
} |
||||
end |
||||
end |
||||
|
||||
defp state(sequencer) do |
||||
Agent.get(sequencer, & &1) |
||||
end |
||||
end |
@ -1,18 +0,0 @@ |
||||
defmodule Explorer.SkippedBalancesTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.SkippedBalances |
||||
|
||||
describe "fetch/1" do |
||||
test "returns a list of address hashes that do not have balances" do |
||||
insert(:address, hash: "0xcashews", balance: nil) |
||||
assert SkippedBalances.fetch(1) == ["0xcashews"] |
||||
end |
||||
|
||||
test "only get a limited set of addresses" do |
||||
insert_list(10, :address, balance: nil) |
||||
insert_list(5, :address, balance: 55) |
||||
assert length(SkippedBalances.fetch(7)) == 7 |
||||
end |
||||
end |
||||
end |
@ -1,77 +0,0 @@ |
||||
defmodule Explorer.SkippedBlocksTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.SkippedBlocks |
||||
|
||||
describe "first/0 when there are no blocks" do |
||||
test "returns no blocks" do |
||||
assert SkippedBlocks.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when there are no skipped blocks" do |
||||
test "returns no blocks" do |
||||
insert(:block, %{number: 0}) |
||||
assert SkippedBlocks.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when a block has been skipped" do |
||||
test "returns the first skipped block number" do |
||||
insert(:block, %{number: 0}) |
||||
insert(:block, %{number: 2}) |
||||
assert SkippedBlocks.first() == ["1"] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when there are no blocks" do |
||||
test "returns no blocks" do |
||||
assert SkippedBlocks.first(1) == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when there are no skipped blocks" do |
||||
test "returns no blocks" do |
||||
insert(:block, %{number: 0}) |
||||
assert SkippedBlocks.first(1) == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when a block has been skipped" do |
||||
test "returns the skipped block number" do |
||||
insert(:block, %{number: 1}) |
||||
assert SkippedBlocks.first(1) == ["0"] |
||||
end |
||||
|
||||
test "returns up to the requested number of skipped block numbers in reverse order" do |
||||
insert(:block, %{number: 1}) |
||||
insert(:block, %{number: 3}) |
||||
assert SkippedBlocks.first(1) == ["2"] |
||||
end |
||||
|
||||
test "returns only the skipped block number" do |
||||
insert(:block, %{number: 1}) |
||||
assert SkippedBlocks.first(100) == ["0"] |
||||
end |
||||
|
||||
test "returns all the skipped block numbers in random order" do |
||||
insert(:block, %{number: 1}) |
||||
insert(:block, %{number: 3}) |
||||
block_ids = SkippedBlocks.first(100) |
||||
assert("2" in block_ids and "0" in block_ids) |
||||
end |
||||
end |
||||
|
||||
describe "latest_block_number/0 when there are no blocks" do |
||||
test "returns -1" do |
||||
assert SkippedBlocks.latest_block_number() == -1 |
||||
end |
||||
end |
||||
|
||||
describe "latest_block_number/0 when there is a block" do |
||||
test "returns the number of the block" do |
||||
insert(:block, %{number: 1}) |
||||
assert SkippedBlocks.latest_block_number() == 1 |
||||
end |
||||
end |
||||
end |
@ -1,26 +0,0 @@ |
||||
defmodule Explorer.SkippedInternalTransactionsTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.SkippedInternalTransactions |
||||
|
||||
describe "first/0 when there are no transactions" do |
||||
test "returns no transaction hashes" do |
||||
assert SkippedInternalTransactions.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when there are transactions with internal transactions" do |
||||
test "returns no transaction hashes" do |
||||
transaction = insert(:transaction) |
||||
insert(:internal_transaction, transaction: transaction) |
||||
assert SkippedInternalTransactions.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when there are transactions with no internal transactions" do |
||||
test "returns the transaction hash" do |
||||
insert(:transaction, hash: "0xdeadbeef") |
||||
assert SkippedInternalTransactions.first() == ["0xdeadbeef"] |
||||
end |
||||
end |
||||
end |
@ -1,54 +0,0 @@ |
||||
defmodule Explorer.SkippedReceiptsTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.SkippedReceipts |
||||
|
||||
describe "first/0 when there are no transactions" do |
||||
test "returns no transactions" do |
||||
assert SkippedReceipts.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when there are no skipped transactions" do |
||||
test "returns no transactions" do |
||||
transaction = insert(:transaction) |
||||
insert(:receipt, transaction: transaction) |
||||
assert SkippedReceipts.first() == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/0 when a transaction has been skipped" do |
||||
test "returns the first skipped transaction hash" do |
||||
insert(:transaction, %{hash: "0xBEE75"}) |
||||
assert SkippedReceipts.first() == ["0xBEE75"] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when there are no transactions" do |
||||
test "returns no transactions" do |
||||
assert SkippedReceipts.first(1) == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when there are no skipped transactions" do |
||||
test "returns no transactions" do |
||||
transaction = insert(:transaction) |
||||
insert(:receipt, transaction: transaction) |
||||
assert SkippedReceipts.first(1) == [] |
||||
end |
||||
end |
||||
|
||||
describe "first/1 when a transaction has been skipped" do |
||||
test "returns the skipped transaction number" do |
||||
insert(:transaction, %{hash: "0xBEE75"}) |
||||
assert SkippedReceipts.first(1) == ["0xBEE75"] |
||||
end |
||||
|
||||
test "returns all the skipped transaction hashes in random order" do |
||||
insert(:transaction, %{hash: "0xBEE75"}) |
||||
insert(:transaction, %{hash: "0xBE475"}) |
||||
transaction_hashes = SkippedReceipts.first(100) |
||||
assert("0xBEE75" in transaction_hashes and "0xBE475" in transaction_hashes) |
||||
end |
||||
end |
||||
end |
@ -1,39 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportBalanceTest do |
||||
import Mock |
||||
|
||||
alias Explorer.Chain |
||||
alias Explorer.Chain.Address |
||||
alias Explorer.Workers.ImportBalance |
||||
|
||||
use Explorer.DataCase |
||||
|
||||
describe "perform/1" do |
||||
test "imports the balance for an address" do |
||||
ImportBalance.perform("0x1d12e5716c593b156eb7152ca4360f6224ba3b0a") |
||||
|
||||
expected_balance = Decimal.new(1_572_374_181_095_000_000) |
||||
|
||||
assert {:ok, %Address{balance: ^expected_balance}} = |
||||
Chain.hash_to_address("0x1d12e5716c593b156eb7152ca4360f6224ba3b0a") |
||||
end |
||||
end |
||||
|
||||
describe "perform_later/1" do |
||||
test "delays the import of the balance for an address" do |
||||
with_mock Exq, |
||||
enqueue: fn _, _, _, _ -> |
||||
insert( |
||||
:address, |
||||
hash: "0xskateboards", |
||||
balance: 66 |
||||
) |
||||
end do |
||||
ImportBalance.perform_later("0xskateboards") |
||||
|
||||
expected_balance = Decimal.new(66) |
||||
|
||||
assert {:ok, %Address{balance: ^expected_balance}} = Chain.hash_to_address("0xskateboards") |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,66 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportBlockTest do |
||||
use Explorer.DataCase |
||||
|
||||
import Mock |
||||
|
||||
alias Explorer.Chain.Block |
||||
alias Explorer.Repo |
||||
alias Explorer.Workers.ImportBlock |
||||
|
||||
describe "perform/1" do |
||||
test "imports the requested block number as an integer" do |
||||
use_cassette "import_block_perform_1_integer" do |
||||
ImportBlock.perform(1) |
||||
last_block = Block |> order_by(asc: :number) |> Repo.one() |
||||
assert last_block.number == 1 |
||||
end |
||||
end |
||||
|
||||
test "imports the requested block number as a string" do |
||||
use_cassette "import_block_perform_1_string" do |
||||
ImportBlock.perform("1") |
||||
last_block = Block |> order_by(asc: :number) |> Repo.one() |
||||
assert last_block.number == 1 |
||||
end |
||||
end |
||||
|
||||
test "imports the earliest block" do |
||||
use_cassette "import_block_perform_1_earliest" do |
||||
ImportBlock.perform("earliest") |
||||
last_block = Block |> order_by(asc: :number) |> Repo.one() |
||||
assert last_block.number == 0 |
||||
end |
||||
end |
||||
|
||||
test "imports the latest block" do |
||||
use_cassette "import_block_perform_1_latest" do |
||||
with_mock Exq, enqueue: fn _, _, _, [number] -> insert(:block, number: number) end do |
||||
ImportBlock.perform("latest") |
||||
last_block = Block |> order_by(asc: :number) |> Repo.one() |
||||
assert last_block.number > 0 |
||||
end |
||||
end |
||||
end |
||||
|
||||
test "when there is already a block with the requested hash" do |
||||
use_cassette "import_block_perform_1_duplicate" do |
||||
insert(:block, hash: "0x52c867bc0a91e573dc39300143c3bead7408d09d45bdb686749f02684ece72f3") |
||||
ImportBlock.perform("1") |
||||
block_count = Block |> Repo.all() |> Enum.count() |
||||
assert block_count == 1 |
||||
end |
||||
end |
||||
end |
||||
|
||||
describe "perform_later/1" do |
||||
test "does not retry fetching the latest block" do |
||||
use_cassette "import_block_perform_later_1_latest" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:block, number: 1) end do |
||||
ImportBlock.perform_later("latest") |
||||
last_block = Block |> order_by(asc: :number) |> limit(1) |> Repo.one() |
||||
assert last_block.number == 1 |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,31 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportInternalTransactionTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.Repo |
||||
alias Explorer.Chain.InternalTransaction |
||||
alias Explorer.Workers.ImportInternalTransaction |
||||
|
||||
describe "perform/1" do |
||||
test "does not import the internal transactions when no transaction with the hash exists" do |
||||
use_cassette "import_internal_transaction_perform_1" do |
||||
assert_raise Ecto.NoResultsError, fn -> |
||||
ImportInternalTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
end |
||||
end |
||||
end |
||||
|
||||
test "imports a receipt when an internal transaction with the hash exists" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68" |
||||
) |
||||
|
||||
use_cassette "import_internal_transaction_perform_1" do |
||||
ImportInternalTransaction.perform("0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68") |
||||
|
||||
internal_transaction_count = InternalTransaction |> Repo.all() |> Enum.count() |
||||
assert internal_transaction_count == 2 |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,31 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportReceiptTest do |
||||
use Explorer.DataCase |
||||
|
||||
alias Explorer.Repo |
||||
alias Explorer.Chain.Receipt |
||||
alias Explorer.Workers.ImportReceipt |
||||
|
||||
describe "perform/1" do |
||||
test "does not import a receipt when no transaction with the hash exists" do |
||||
use_cassette "import_receipt_perform_1" do |
||||
ImportReceipt.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
|
||||
assert Repo.one(Receipt) == nil |
||||
end |
||||
end |
||||
|
||||
test "imports a receipt when a transaction with the hash exists" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_receipt_perform_1" do |
||||
ImportReceipt.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
|
||||
receipt_count = Receipt |> Repo.all() |> Enum.count() |
||||
assert receipt_count == 1 |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,23 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportSkippedBlocksTest do |
||||
use Explorer.DataCase |
||||
|
||||
import Mock |
||||
|
||||
alias Explorer.Chain.Block |
||||
alias Explorer.Repo |
||||
alias Explorer.Workers.{ImportBlock, ImportSkippedBlocks} |
||||
|
||||
describe "perform/1" do |
||||
test "imports the requested number of skipped blocks" do |
||||
insert(:block, %{number: 2}) |
||||
|
||||
use_cassette "import_skipped_blocks_perform_1" do |
||||
with_mock ImportBlock, perform_later: fn number -> insert(:block, number: number) end do |
||||
ImportSkippedBlocks.perform(1) |
||||
last_block = Block |> order_by(asc: :number) |> limit(1) |> Repo.one() |
||||
assert last_block.number == 1 |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,148 +0,0 @@ |
||||
defmodule Explorer.Workers.ImportTransactionTest do |
||||
use Explorer.DataCase |
||||
|
||||
import Mock |
||||
|
||||
alias Explorer.Chain.{InternalTransaction, Receipt, Transaction} |
||||
alias Explorer.Repo |
||||
alias Explorer.Workers.ImportInternalTransaction |
||||
alias Explorer.Workers.ImportTransaction |
||||
|
||||
describe "perform/1" do |
||||
test "imports the requested transaction hash" do |
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do |
||||
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
end |
||||
|
||||
transaction = Transaction |> Repo.one() |
||||
|
||||
assert transaction.hash == "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
end |
||||
end |
||||
|
||||
test "when there is already a transaction with the requested hash" do |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do |
||||
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
end |
||||
|
||||
transaction_count = Transaction |> Repo.all() |> Enum.count() |
||||
assert transaction_count == 1 |
||||
end |
||||
end |
||||
|
||||
test "imports the receipt in another queue" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:receipt, transaction: transaction) end do |
||||
with_mock ImportInternalTransaction, perform_later: fn _ -> :ok end do |
||||
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
|
||||
receipt = Repo.one(Receipt) |
||||
refute is_nil(receipt) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
test "imports the receipt in another queue when a map is supplied" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:receipt, transaction: transaction) end do |
||||
with_mock ImportInternalTransaction, perform_later: fn _ -> :ok end do |
||||
ImportTransaction.perform(%{ |
||||
"hash" => "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926", |
||||
"to" => "0xc001", |
||||
"from" => "0xbead5", |
||||
"blockHash" => "0xcafe" |
||||
}) |
||||
|
||||
receipt = Repo.one(Receipt) |
||||
refute is_nil(receipt) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
test "imports the internal transactions in another queue" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do |
||||
with_mock ImportInternalTransaction, |
||||
perform_later: fn _ -> insert(:internal_transaction, transaction: transaction) end do |
||||
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
|
||||
internal_transaction = Repo.one(InternalTransaction) |
||||
refute is_nil(internal_transaction) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
test "imports the internal transactions in another queue when a map is supplied" do |
||||
transaction = |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
|
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do |
||||
with_mock ImportInternalTransaction, |
||||
perform_later: fn _ -> insert(:internal_transaction, transaction: transaction) end do |
||||
ImportTransaction.perform(%{ |
||||
"hash" => "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926", |
||||
"to" => "0xc001", |
||||
"from" => "0xbead5", |
||||
"blockHash" => "0xcafe" |
||||
}) |
||||
|
||||
internal_transaction = Repo.one(InternalTransaction) |
||||
refute is_nil(internal_transaction) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
describe "perform_later/1" do |
||||
test "imports the transaction in another queue" do |
||||
use_cassette "import_transaction_perform_1" do |
||||
with_mock Exq, |
||||
enqueue: fn _, _, _, _ -> |
||||
insert( |
||||
:transaction, |
||||
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
) |
||||
end do |
||||
ImportTransaction.perform_later("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926") |
||||
|
||||
transaction = Repo.one(Transaction) |
||||
|
||||
assert transaction.hash == "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926" |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,52 +0,0 @@ |
||||
defmodule Explorer.Workers.RefreshBalanceTest do |
||||
use Explorer.DataCase |
||||
|
||||
import Mock |
||||
|
||||
alias Explorer.Chain.{Credit, Debit} |
||||
alias Explorer.Workers.RefreshBalance |
||||
|
||||
describe "perform/0" do |
||||
test "refreshes credit balances" do |
||||
with_mock Exq, enqueue: fn _, _, _, [type] -> RefreshBalance.perform(type) end do |
||||
address = insert(:address) |
||||
transaction = insert(:transaction, value: 20) |
||||
insert(:to_address, address: address, transaction: transaction) |
||||
insert(:receipt, transaction: transaction, status: 1) |
||||
RefreshBalance.perform() |
||||
assert Repo.one(Credit).value == Decimal.new(20) |
||||
end |
||||
end |
||||
|
||||
test "refreshes debit balances" do |
||||
with_mock Exq, enqueue: fn _, _, _, [type] -> RefreshBalance.perform(type) end do |
||||
address = insert(:address) |
||||
transaction = insert(:transaction, value: 20) |
||||
insert(:from_address, address: address, transaction: transaction) |
||||
insert(:receipt, transaction: transaction, status: 1) |
||||
RefreshBalance.perform() |
||||
assert Repo.one(Debit).value == Decimal.new(20) |
||||
end |
||||
end |
||||
end |
||||
|
||||
describe "perform/1" do |
||||
test "refreshes credit balances" do |
||||
address = insert(:address) |
||||
transaction = insert(:transaction, value: 20) |
||||
insert(:to_address, address: address, transaction: transaction) |
||||
insert(:receipt, transaction: transaction, status: 1) |
||||
RefreshBalance.perform("credit") |
||||
assert Repo.one(Credit).value == Decimal.new(20) |
||||
end |
||||
|
||||
test "refreshes debit balances" do |
||||
address = insert(:address) |
||||
transaction = insert(:transaction, value: 20) |
||||
insert(:from_address, address: address, transaction: transaction) |
||||
insert(:receipt, transaction: transaction, status: 1) |
||||
RefreshBalance.perform("debit") |
||||
assert Repo.one(Debit).value == Decimal.new(20) |
||||
end |
||||
end |
||||
end |
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Chain.BlockTransactionFactory do |
||||
defmacro __using__(_opts) do |
||||
quote do |
||||
def block_transaction_factory do |
||||
%Explorer.Chain.BlockTransaction{} |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Chain.FromAddressFactory do |
||||
defmacro __using__(_opts) do |
||||
quote do |
||||
def from_address_factory do |
||||
%Explorer.Chain.FromAddress{} |
||||
end |
||||
end |
||||
end |
||||
end |
@ -1,9 +0,0 @@ |
||||
defmodule Explorer.Chain.ToAddressFactory do |
||||
defmacro __using__(_opts) do |
||||
quote do |
||||
def to_address_factory do |
||||
%Explorer.Chain.ToAddress{} |
||||
end |
||||
end |
||||
end |
||||
end |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue