* Fix bug when restarting index from last block
* Include tests for block sequencer
* Check for new blocks periodically after indexing

Co-authored-by: Alex Garibay <alex@alexgaribay.com>
Co-authored-by: Luke Imhoff <luke.imhoff@dockyard.com>
pull/162/head
Chris McCord authored 7 years ago, committed by Luke Imhoff
parent fa2ab3b24b
commit 117871a834
  1. .circleci/config.yml (5)
  2. .credo.exs (9)
  3. apps/explorer/config/config.exs (30)
  4. apps/explorer/config/dev.exs (22)
  5. apps/explorer/config/prod.exs (35)
  6. apps/explorer/config/test.exs (3)
  7. apps/explorer/lib/explorer/application.ex (18)
  8. apps/explorer/lib/explorer/chain.ex (190)
  9. apps/explorer/lib/explorer/chain/block.ex (58)
  10. apps/explorer/lib/explorer/chain/block_transaction.ex (25)
  11. apps/explorer/lib/explorer/chain/from_address.ex (21)
  12. apps/explorer/lib/explorer/chain/internal_transaction.ex (37)
  13. apps/explorer/lib/explorer/chain/receipt.ex (52)
  14. apps/explorer/lib/explorer/chain/statistics.ex (3)
  15. apps/explorer/lib/explorer/chain/statistics/server.ex (4)
  16. apps/explorer/lib/explorer/chain/to_address.ex (20)
  17. apps/explorer/lib/explorer/chain/transaction.ex (38)
  18. apps/explorer/lib/explorer/eth.ex (287)
  19. apps/explorer/lib/explorer/exq_node_identifier.ex (5)
  20. apps/explorer/lib/explorer/importers/balance_importer.ex (17)
  21. apps/explorer/lib/explorer/importers/block_importer.ex (81)
  22. apps/explorer/lib/explorer/importers/internal_transaction_importer.ex (80)
  23. apps/explorer/lib/explorer/importers/receipt_importer.ex (79)
  24. apps/explorer/lib/explorer/importers/transaction_importer.ex (142)
  25. apps/explorer/lib/explorer/indexer.ex (38)
  26. apps/explorer/lib/explorer/indexer/block_fetcher.ex (212)
  27. apps/explorer/lib/explorer/indexer/sequence.ex (80)
  28. apps/explorer/lib/explorer/indexer/supervisor.ex (19)
  29. apps/explorer/lib/explorer/repo.ex (24)
  30. apps/explorer/lib/explorer/scheduler.ex (4)
  31. apps/explorer/lib/explorer/skipped_balances.ex (21)
  32. apps/explorer/lib/explorer/skipped_blocks.ex (32)
  33. apps/explorer/lib/explorer/skipped_internal_transactions.ex (25)
  34. apps/explorer/lib/explorer/skipped_receipts.ex (25)
  35. apps/explorer/lib/explorer/workers/import_balance.ex (13)
  36. apps/explorer/lib/explorer/workers/import_block.ex (26)
  37. apps/explorer/lib/explorer/workers/import_internal_transaction.ex (12)
  38. apps/explorer/lib/explorer/workers/import_receipt.ex (12)
  39. apps/explorer/lib/explorer/workers/import_skipped_blocks.ex (18)
  40. apps/explorer/lib/explorer/workers/import_transaction.ex (26)
  41. apps/explorer/lib/explorer/workers/refresh_balance.ex (29)
  42. apps/explorer/lib/mix/tasks/exq.start.ex (25)
  43. apps/explorer/lib/mix/tasks/scrape.balances.ex (24)
  44. apps/explorer/lib/mix/tasks/scrape.blocks.ex (26)
  45. apps/explorer/lib/mix/tasks/scrape.internal_transactions.ex (24)
  46. apps/explorer/lib/mix/tasks/scrape.receipts.ex (24)
  47. apps/explorer/mix.exs (10)
  48. apps/explorer/priv/repo/migrations/20180117221921_create_address.exs (5)
  49. apps/explorer/priv/repo/migrations/20180117221922_create_blocks.exs (20)
  50. apps/explorer/priv/repo/migrations/20180117221922_create_transactions.exs (14)
  51. apps/explorer/priv/repo/migrations/20180117221923_create_transactions.exs (37)
  52. apps/explorer/priv/repo/migrations/20180124003303_add_value_to_transactions.exs (9)
  53. apps/explorer/priv/repo/migrations/20180129201141_add_fields_to_transactions.exs (18)
  54. apps/explorer/priv/repo/migrations/20180130004126_create_from_addresses.exs (14)
  55. apps/explorer/priv/repo/migrations/20180130004544_create_to_addresses.exs (14)
  56. apps/explorer/priv/repo/migrations/20180202195342_create_block_transactions.exs (14)
  57. apps/explorer/priv/repo/migrations/20180202215933_remove_block_id_from_transactions.exs (9)
  58. apps/explorer/priv/repo/migrations/20180208010839_add_indices_to_block_and_block_transaction.exs (8)
  59. apps/explorer/priv/repo/migrations/20180208054620_add_transactions_index_to_timestamps.exs (8)
  60. apps/explorer/priv/repo/migrations/20180212214442_create_receipts.exs (9)
  61. apps/explorer/priv/repo/migrations/20180212222309_create_logs.exs (13)
  62. apps/explorer/priv/repo/migrations/20180216011950_create_balances_views.exs (48)
  63. apps/explorer/priv/repo/migrations/20180221001948_create_internal_transactions.exs (28)
  64. apps/explorer/priv/repo/migrations/20180223220816_move_address_keys_to_transactions.exs (12)
  65. apps/explorer/priv/repo/migrations/20180223223257_index_transaction_address_ids.exs (8)
  66. apps/explorer/priv/repo/migrations/20180224004300_create_credit_debit_materialized_view.exs (0)
  67. apps/explorer/priv/repo/migrations/20180227004146_dedup_internal_transactions_and_add_unique_index.exs (38)
  68. apps/explorer/priv/repo/migrations/20180227225553_add_balance_and_balance_updated_at_to_address.exs (10)
  69. apps/explorer/priv/repo/migrations/20180301013446_add_receipt_id_to_transactions.exs (9)
  70. apps/explorer/test/explorer/chain/block_transaction_test.exs (18)
  71. apps/explorer/test/explorer/chain/from_address_test.exs (13)
  72. apps/explorer/test/explorer/chain/statistics_test.exs (13)
  73. apps/explorer/test/explorer/chain/to_address_test.exs (13)
  74. apps/explorer/test/explorer/chain_test.exs (31)
  75. apps/explorer/test/explorer/ethereumex_extensions_test.exs (14)
  76. apps/explorer/test/explorer/importers/balance_importer_test.exs (40)
  77. apps/explorer/test/explorer/importers/block_importer_test.exs (121)
  78. apps/explorer/test/explorer/importers/internal_transaction_importer_test.exs (115)
  79. apps/explorer/test/explorer/importers/receipt_importer_test.exs (119)
  80. apps/explorer/test/explorer/importers/transaction_importer_test.exs (270)
  81. apps/explorer/test/explorer/indexer/sequence_test.exs (93)
  82. apps/explorer/test/explorer/skipped_balances_test.exs (18)
  83. apps/explorer/test/explorer/skipped_blocks_test.exs (77)
  84. apps/explorer/test/explorer/skipped_internal_transactions_test.exs (26)
  85. apps/explorer/test/explorer/skipped_transactions_test.exs (54)
  86. apps/explorer/test/explorer/workers/import_balance_test.exs (39)
  87. apps/explorer/test/explorer/workers/import_block_test.exs (66)
  88. apps/explorer/test/explorer/workers/import_internal_transaction_test.exs (31)
  89. apps/explorer/test/explorer/workers/import_receipt_test.exs (31)
  90. apps/explorer/test/explorer/workers/import_skipped_blocks_test.exs (23)
  91. apps/explorer/test/explorer/workers/import_transaction_test.exs (148)
  92. apps/explorer/test/explorer/workers/refresh_balance_test.exs (52)
  93. apps/explorer/test/support/factories/chain/block_transaction_factory.ex (9)
  94. apps/explorer/test/support/factories/chain/from_address_factory.ex (9)
  95. apps/explorer/test/support/factories/chain/to_address_factory.ex (9)
  96. apps/explorer/test/support/factories/chain/transaction_factory.ex (10)
  97. apps/explorer/test/support/factory.ex (3)
  98. apps/explorer_web/config/config.exs (2)
  99. apps/explorer_web/lib/explorer_web/router.ex (22)
  100. apps/explorer_web/mix.exs (7)
  101. Some files were not shown because too many files have changed in this diff.

@ -325,7 +325,6 @@ jobs:
POSTGRES_PASSWORD: postgres
# match PGUSER for elixir image above
POSTGRES_USER: postgres
- image: circleci/redis:4.0.9-alpine
working_directory: ~/app
@ -340,10 +339,6 @@ jobs:
name: Wait for DB
command: dockerize -wait tcp://localhost:5432 -timeout 1m
- run:
name: Wait for Redis
command: dockerize -wait tcp://localhost:6379 -timeout 1m
- run: mix coveralls.circle --umbrella
- store_test_results:

@ -63,7 +63,7 @@
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage, excluded_lastnames: ~w(Number Time), priority: :low},
{Credo.Check.Design.AliasUsage, excluded_lastnames: ~w(DateTime Number Repo Time), priority: :low},
# For some checks, you can also set other parameters
#
@ -77,7 +77,7 @@
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagTODO, exit_status: 0},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
@ -122,11 +122,12 @@
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
{Credo.Check.Warning.RaiseInsideRescue},
{Credo.Check.Warning.RaiseInsideRescue, false},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize},
# TODO reenable before merging optimized-indexer branch
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem},
{Credo.Check.Refactor.VariableRebinding},
{Credo.Check.Warning.MapGetUnsafePass},

@ -5,7 +5,16 @@
# is restricted to this project.
use Mix.Config
config :ethereumex, url: "http://localhost:8545"
url = "https://sokol.poa.network"
config :explorer, :eth_client,
http: [recv_timeout: 60_000, timeout: 60_000, hackney: [pool: :eth]],
trace_url: "https://sokol-trace.poa.network",
url: url
config :ethereumex,
url: url,
http_options: [recv_timeout: 60_000, timeout: 60_000, hackney: [pool: :eth]]
# General application configuration
config :explorer, ecto_repos: [Explorer.Repo]
@ -14,25 +23,6 @@ config :explorer, :ethereum, backend: Explorer.Ethereum.Live
config :explorer, Explorer.Integrations.EctoLogger, query_time_ms_threshold: 2_000
config :exq,
host: "localhost",
port: 6379,
namespace: "exq",
start_on_application: false,
scheduler_enable: true,
shutdown_timeout: 5000,
max_retries: 10,
queues: [
{"default", 1},
{"balances", 1},
{"blocks", 1},
{"internal_transactions", 1},
{"transactions", 1},
{"receipts", 1}
]
config :exq_ui, server: false
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

@ -5,27 +5,7 @@ config :explorer, Explorer.Repo,
adapter: Ecto.Adapters.Postgres,
database: "explorer_dev",
hostname: "localhost",
loggers: [],
pool_size: 10
# Configure Quantum
config :explorer, Explorer.Scheduler,
jobs: [
[
schedule: {:extended, "*/15 * * * * *"},
task: {Explorer.Workers.RefreshBalance, :perform_later, []}
],
[
schedule: {:extended, "*/5 * * * * *"},
task: {Explorer.Workers.ImportBlock, :perform_later, ["latest"]}
],
[
schedule: {:extended, "*/5 * * * * *"},
task: {Explorer.Workers.ImportBlock, :perform_later, ["pending"]}
],
[
schedule: {:extended, "*/15 * * * * *"},
task: {Explorer.Workers.ImportSkippedBlocks, :perform_later, [1]}
]
]
import_config "dev.secret.exs"

@ -12,38 +12,3 @@ config :explorer, Explorer.Repo,
# Configure Web3
config :ethereumex, url: System.get_env("ETHEREUM_URL")
# Configure Quantum
config :explorer, Explorer.Scheduler,
jobs: [
[
schedule: {:extended, System.get_env("EXQ_BALANCE_SCHEDULE") || "0 * * * * *"},
task: {Explorer.Workers.RefreshBalance, :perform_later, []}
],
[
schedule: {:extended, System.get_env("EXQ_LATEST_BLOCK_SCHEDULE") || "* * * * * *"},
task: {Explorer.Workers.ImportBlock, :perform_later, ["latest"]}
],
[
schedule: {:extended, System.get_env("EXQ_PENDING_BLOCK_SCHEDULE") || "* * * * * *"},
task: {Explorer.Workers.ImportBlock, :perform_later, ["pending"]}
],
[
schedule: {:extended, System.get_env("EXQ_BACKFILL_SCHEDULE") || "* * * * * *"},
task:
{Explorer.Workers.ImportSkippedBlocks, :perform_later,
[String.to_integer(System.get_env("EXQ_BACKFILL_BATCH_SIZE") || "1")]}
]
]
# Configure Exq
config :exq,
node_identifier: Explorer.ExqNodeIdentifier,
url: System.get_env("REDIS_URL"),
queues: [
{"blocks", String.to_integer(System.get_env("EXQ_BLOCKS_CONCURRENCY") || "1")},
{"default", String.to_integer(System.get_env("EXQ_CONCURRENCY") || "1")},
{"internal_transactions", String.to_integer(System.get_env("EXQ_INTERNAL_TRANSACTIONS_CONCURRENCY") || "1")},
{"receipts", String.to_integer(System.get_env("EXQ_RECEIPTS_CONCURRENCY") || "1")},
{"transactions", String.to_integer(System.get_env("EXQ_TRANSACTIONS_CONCURRENCY") || "1")}
]

@ -8,7 +8,4 @@ config :explorer, Explorer.Repo,
pool: Ecto.Adapters.SQL.Sandbox,
ownership_timeout: 60_000
# Configure ethereumex
config :ethereumex, url: "https://sokol-trace.poa.network"
config :explorer, :ethereum, backend: Explorer.Ethereum.Test

@ -5,7 +5,7 @@ defmodule Explorer.Application do
use Application
import Supervisor.Spec
import Supervisor.Spec, only: [supervisor: 3]
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@ -19,20 +19,24 @@ defmodule Explorer.Application do
defp children(:test), do: children()
defp children(_) do
exq_options = [] |> Keyword.put(:mode, :enqueuer)
children() ++
[
supervisor(Exq, [exq_options]),
worker(Explorer.Chain.Statistics.Server, []),
Explorer.ETH,
supervisor(Task.Supervisor, [[name: Explorer.TaskSupervisor]], id: Explorer.TaskSupervisor),
Explorer.Indexer,
Explorer.Chain.Statistics.Server,
Explorer.ExchangeRates
]
end
defp children do
[
supervisor(Explorer.Repo, []),
{Task.Supervisor, name: Explorer.ExchangeRateTaskSupervisor}
Explorer.Repo,
supervisor(
Task.Supervisor,
[[name: Explorer.ExchangeRateTaskSupervisor]],
id: Explorer.ExchangeRateTaskSupervisor
)
]
end
end

@ -5,18 +5,9 @@ defmodule Explorer.Chain do
import Ecto.Query, only: [from: 2, order_by: 2, preload: 2, where: 2, where: 3]
alias Explorer.Chain.{
Address,
Block,
BlockTransaction,
InternalTransaction,
Log,
Receipt,
Transaction,
Wei
}
alias Explorer.Repo.NewRelic, as: Repo
alias Ecto.Multi
alias Explorer.Chain.{Address, Block, InternalTransaction, Log, Receipt, Transaction, Wei}
alias Explorer.Repo
# Types
@ -47,6 +38,10 @@ defmodule Explorer.Chain do
# Functions
def block_count do
Repo.one(from(b in Block, select: count(b.id)))
end
@doc """
Finds all `t:Explorer.Chain.Transaction.t/0` in the `t:Explorer.Chain.Block.t/0`.
@ -85,12 +80,11 @@ defmodule Explorer.Chain do
def block_to_transaction_count(%Block{id: block_id}) do
query =
from(
block_transaction in BlockTransaction,
join: block in assoc(block_transaction, :block),
where: block_transaction.block_id == ^block_id
transaction in Transaction,
where: transaction.block_id == ^block_id
)
Repo.aggregate(query, :count, :block_id)
Repo.aggregate(query, :count, :id)
end
@doc """
@ -173,6 +167,13 @@ defmodule Explorer.Chain do
address_to_transactions(address, Keyword.put(options, :direction, :from))
end
@doc """
TODO
"""
def get_latest_block do
Repo.one(from(b in Block, limit: 1, order_by: [desc: b.number]))
end
@doc """
The `t:Explorer.Chain.Transaction.t/0` `gas_price` of the `transaction` in `unit`.
"""
@ -270,6 +271,25 @@ defmodule Explorer.Chain do
end
end
@doc """
TODO
"""
def import_blocks(raw_blocks, internal_transactions, receipts) do
{blocks, transactions} = extract_blocks(raw_blocks)
Multi.new()
|> Multi.run(:blocks, &insert_blocks(&1, blocks))
|> Multi.run(:transactions, &insert_transactions(&1, transactions))
|> Multi.run(:internal, &insert_internal(&1, internal_transactions))
|> Multi.run(:receipts, &insert_receipts(&1, receipts))
|> Multi.run(:logs, &insert_logs(&1))
|> Repo.transaction()
end
def internal_transaction_count do
Repo.one(from(t in InternalTransaction, select: count(t.id)))
end
@doc """
The last `t:Explorer.Chain.Transaction.t/0` `id`.
"""
@ -313,6 +333,10 @@ defmodule Explorer.Chain do
|> Repo.paginate(pagination)
end
def log_count do
Repo.one(from(l in Log, select: count(l.id)))
end
@doc """
The maximum `t:Explorer.Chain.Block.t/0` `number`
"""
@ -321,6 +345,28 @@ defmodule Explorer.Chain do
Repo.aggregate(Block, :max, :number)
end
@doc """
TODO
"""
def missing_block_numbers do
{:ok, {_, missing_count, missing_ranges}} =
Repo.transaction(fn ->
query = from(b in Block, select: b.number, order_by: [asc: b.number])
query
|> Repo.stream(max_rows: 1000)
|> Enum.reduce({-1, 0, []}, fn
num, {prev, missing_count, acc} when prev + 1 == num ->
{num, missing_count, acc}
num, {prev, missing_count, acc} ->
{num, missing_count + (num - prev - 1), [{prev + 1, num - 1} | acc]}
end)
end)
{missing_count, missing_ranges}
end
@doc """
Finds `t:Explorer.Chain.Block.t/0` with `number`
"""
@ -335,6 +381,10 @@ defmodule Explorer.Chain do
end
end
def receipt_count do
Repo.one(from(r in Receipt, select: count(r.id)))
end
@doc """
`t:Explorer.Chain.Transaction/0`s to `address`.
@ -523,6 +573,18 @@ defmodule Explorer.Chain do
from(q in query, order_by: [desc: q.inserted_at, desc: q.id])
end
defp extract_blocks(raw_blocks) do
timestamps = timestamps()
{blocks, transactions} =
Enum.reduce(raw_blocks, {[], []}, fn raw_block, {blocks_acc, trans_acc} ->
{:ok, block, transactions} = Block.extract(raw_block, timestamps)
{[block | blocks_acc], trans_acc ++ transactions}
end)
{Enum.reverse(blocks), transactions}
end
defp for_parent_transaction(query, hash) when is_binary(hash) do
from(
child in query,
@ -531,6 +593,97 @@ defmodule Explorer.Chain do
)
end
defp insert_blocks(%{}, blocks) do
{_, inserted_blocks} =
Repo.safe_insert_all(
Block,
blocks,
returning: [:id, :number],
on_conflict: :replace_all,
conflict_target: :number
)
{:ok, inserted_blocks}
end
defp insert_internal(%{transactions: transactions}, internal_transactions) do
timestamps = timestamps()
internals =
Enum.flat_map(transactions, fn %{hash: hash, id: id} ->
case Map.fetch(internal_transactions, hash) do
{:ok, traces} ->
Enum.map(traces, &InternalTransaction.extract(&1, id, timestamps))
:error ->
[]
end
end)
{_, inserted} = Repo.safe_insert_all(InternalTransaction, internals, on_conflict: :nothing)
{:ok, inserted}
end
defp insert_logs(%{receipts: %{inserted: receipts, logs: logs_map}}) do
logs_to_insert =
Enum.reduce(receipts, [], fn receipt, acc ->
case Map.fetch(logs_map, receipt.transaction_id) do
{:ok, []} ->
acc
{:ok, [_ | _] = logs} ->
logs = Enum.map(logs, &Map.put(&1, :receipt_id, receipt.id))
logs ++ acc
end
end)
{_, inserted_logs} = Repo.safe_insert_all(Log, logs_to_insert, returning: [:id])
{:ok, inserted_logs}
end
defp insert_receipts(%{transactions: transactions}, raw_receipts) do
timestamps = timestamps()
{receipts_to_insert, logs_map} =
Enum.reduce(transactions, {[], %{}}, fn trans, {receipts_acc, logs_acc} ->
case Map.fetch(raw_receipts, trans.hash) do
{:ok, raw_receipt} ->
{receipt, logs} = Receipt.extract(raw_receipt, trans.id, timestamps)
{[receipt | receipts_acc], Map.put(logs_acc, trans.id, logs)}
:error ->
{receipts_acc, logs_acc}
end
end)
{_, inserted_receipts} =
Repo.safe_insert_all(
Receipt,
receipts_to_insert,
returning: [:id, :transaction_id]
)
{:ok, %{inserted: inserted_receipts, logs: logs_map}}
end
defp insert_transactions(%{blocks: blocks}, transactions) do
blocks_map = for block <- blocks, into: %{}, do: {block.number, block}
transactions =
for transaction <- transactions do
%{id: id} = Map.fetch!(blocks_map, transaction.block_number)
transaction
|> Map.put(:block_id, id)
|> Map.delete(:block_number)
end
{_, inserted} = Repo.safe_insert_all(Transaction, transactions, returning: [:id, :hash])
{:ok, inserted}
end
defp join_association(query, association, necessity) when is_atom(association) do
case necessity do
:optional ->
@ -556,6 +709,11 @@ defmodule Explorer.Chain do
)
end
defp timestamps do
now = Ecto.DateTime.utc()
%{inserted_at: now, updated_at: now}
end
defp transaction_hash_to_logs(transaction_hash, options)
when is_binary(transaction_hash) and is_list(options) do
lower_transaction_hash = String.downcase(transaction_hash)
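
A worked example of the `missing_block_numbers/0` fold added above (the stored block numbers are illustrative; the reducer is the one in this diff):

    # Stored block numbers, streamed in ascending order: 0, 1, 2, 5, 6, 9
    # Accumulator: {previous_number, missing_count, missing_ranges}, starting at {-1, 0, []}.
    # Each gap adds (num - prev - 1) to the count and prepends a {first_missing, last_missing} range.
    #
    #   0 -> {0, 0, []}
    #   1 -> {1, 0, []}
    #   2 -> {2, 0, []}
    #   5 -> {5, 2, [{3, 4}]}
    #   6 -> {6, 2, [{3, 4}]}
    #   9 -> {9, 4, [{7, 8}, {3, 4}]}
    #
    # missing_block_numbers/0 then returns {4, [{7, 8}, {3, 4}]}.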

@ -7,7 +7,8 @@ defmodule Explorer.Chain.Block do
use Explorer.Schema
alias Explorer.Chain.{BlockTransaction, Gas, Hash, Transaction}
alias Ecto.Changeset
alias Explorer.Chain.{Gas, Hash, Transaction}
# Types
@ -24,7 +25,6 @@ defmodule Explorer.Chain.Block do
@type block_number :: non_neg_integer()
@typedoc """
* `block_transactions` - The `t:Explorer.Chain.BlockTransaction.t/0`s joins this block to its `transactions`
* `difficulty` - how hard the block was to mine.
* `gas_limit` - If the total number of gas used by the computation spawned by the transaction, including the original
message and any sub-messages that may be triggered, is less than or equal to the gas limit, then the transaction
@ -43,7 +43,6 @@ defmodule Explorer.Chain.Block do
* `transactions` - the `t:Explorer.Chain.Transaction.t/0` in this block.
"""
@type t :: %__MODULE__{
block_transactions: %Ecto.Association.NotLoaded{} | [BlockTransaction.t()],
difficulty: difficulty(),
gas_limit: Gas.t(),
gas_used: Gas.t(),
@ -73,12 +72,10 @@ defmodule Explorer.Chain.Block do
timestamps()
has_many(:block_transactions, BlockTransaction)
many_to_many(:transactions, Transaction, join_through: "block_transactions")
has_many(:transactions, Transaction)
end
@required_attrs ~w(number hash parent_hash nonce miner difficulty
total_difficulty size gas_limit gas_used timestamp)a
@required_attrs ~w(difficulty gas_limit gas_used hash miner nonce number parent_hash size timestamp total_difficulty)a
@doc false
def changeset(%__MODULE__{} = block, attrs) do
@ -87,7 +84,13 @@ defmodule Explorer.Chain.Block do
|> validate_required(@required_attrs)
|> update_change(:hash, &String.downcase/1)
|> unique_constraint(:hash)
|> cast_assoc(:transactions)
end
@doc false
def extract(raw_block, %{} = timestamps) do
raw_block
|> extract_block(timestamps)
|> extract_transactions(raw_block["transactions"], timestamps)
end
def null, do: %__MODULE__{number: -1, timestamp: :calendar.universal_time()}
@ -95,4 +98,43 @@ defmodule Explorer.Chain.Block do
def latest(query) do
query |> order_by(desc: :number)
end
## Private Functions
defp extract_block(raw_block, %{} = timestamps) do
attrs = %{
hash: raw_block["hash"],
number: raw_block["number"],
gas_used: raw_block["gasUsed"],
timestamp: raw_block["timestamp"],
parent_hash: raw_block["parentHash"],
miner: raw_block["miner"],
difficulty: raw_block["difficulty"],
total_difficulty: raw_block["totalDifficulty"],
size: raw_block["size"],
gas_limit: raw_block["gasLimit"],
nonce: raw_block["nonce"] || "0"
}
case changeset(%__MODULE__{}, attrs) do
%Changeset{valid?: true, changes: changes} -> {:ok, Map.merge(changes, timestamps)}
%Changeset{valid?: false, errors: errors} -> {:error, {:block, errors}}
end
end
defp extract_transactions({:ok, block_changes}, raw_transactions, %{} = timestamps) do
raw_transactions
|> Enum.map(&Transaction.decode(&1, block_changes.number, timestamps))
|> Enum.reduce_while({:ok, block_changes, []}, fn
{:ok, trans_changes}, {:ok, block, acc} ->
{:cont, {:ok, block, [trans_changes | acc]}}
{:error, reason}, _ ->
{:halt, {:error, {:transaction, reason}}}
end)
end
defp extract_transactions({:error, reason}, _transactions, _timestamps) do
{:error, reason}
end
end
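
A note on the shape `extract/2` above produces, as consumed by `Chain.extract_blocks/1` (a hedged summary; the field subsets shown are illustrative):

    # On success: {:ok, block_changes, transaction_changes}
    #   block_changes       -> %{number: ..., hash: ..., inserted_at: ..., updated_at: ...}
    #   transaction_changes -> [%{hash: ..., block_number: ..., ...}, ...]
    #
    # On failure the offending stage is tagged:
    #   {:error, {:block, errors}} or {:error, {:transaction, errors}}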

@ -1,25 +0,0 @@
defmodule Explorer.Chain.BlockTransaction do
@moduledoc "Connects a Block to a Transaction"
use Explorer.Schema
alias Explorer.Chain.{Block, Transaction}
@primary_key false
schema "block_transactions" do
belongs_to(:block, Block)
belongs_to(:transaction, Transaction, primary_key: true)
timestamps()
end
@required_attrs ~w(block_id transaction_id)a
def changeset(%__MODULE__{} = block_transaction, attrs \\ %{}) do
block_transaction
|> cast(attrs, @required_attrs)
|> validate_required(@required_attrs)
|> cast_assoc(:block)
|> cast_assoc(:transaction)
|> unique_constraint(:transaction_id, name: :block_transactions_transaction_id_index)
end
end

@ -1,21 +0,0 @@
defmodule Explorer.Chain.FromAddress do
@moduledoc false
use Explorer.Schema
alias Explorer.Chain.{Address, Transaction}
@primary_key false
schema "from_addresses" do
belongs_to(:address, Address)
belongs_to(:transaction, Transaction, primary_key: true)
timestamps()
end
def changeset(%__MODULE__{} = to_address, attrs \\ %{}) do
to_address
|> cast(attrs, [:transaction_id, :address_id])
|> unique_constraint(:transaction_id, name: :from_addresses_transaction_id_index)
end
end

@ -17,6 +17,7 @@ defmodule Explorer.Chain.InternalTransaction do
@typedoc """
* `call_type` - the type of call
* `from_address` - the source of the `value`
* `from_address_hash` - hash of the source of the `value`
* `from_address_id` - foreign key for `from_address`
* `gas` - the amount of gas allowed
* `gas_used` - the amount of gas used
@ -24,6 +25,7 @@ defmodule Explorer.Chain.InternalTransaction do
* `input` - input bytes to the call
* `output` - output bytes from the call
* `to_address` - the sink of the `value`
* `to_address_hash` - hash of the sink of the `value`
* `to_address_id` - foreign key for `to_address`
* `trace_address` - list of traces
* `transaction` - transaction in which this internal transaction occurred
@ -33,6 +35,7 @@ defmodule Explorer.Chain.InternalTransaction do
@type t :: %__MODULE__{
call_type: call_type,
from_address: %Ecto.Association.NotLoaded{} | Address.t(),
from_address_hash: Address.hash(),
from_address_id: non_neg_integer(),
gas: Gas.t(),
gas_used: Gas.t(),
@ -40,6 +43,7 @@ defmodule Explorer.Chain.InternalTransaction do
input: String.t(),
output: String.t(),
to_address: %Ecto.Association.NotLoaded{} | Address.t(),
to_address_hash: Address.hash(),
to_address_id: non_neg_integer(),
trace_address: [non_neg_integer()],
transaction: %Ecto.Association.NotLoaded{} | Transaction.t(),
@ -49,11 +53,13 @@ defmodule Explorer.Chain.InternalTransaction do
schema "internal_transactions" do
field(:call_type, :string)
field(:from_address_hash, :string)
field(:gas, :decimal)
field(:gas_used, :decimal)
field(:index, :integer)
field(:input, :string)
field(:output, :string)
field(:to_address_hash, :string)
field(:trace_address, {:array, :integer})
field(:value, :decimal)
@ -77,4 +83,35 @@ defmodule Explorer.Chain.InternalTransaction do
|> foreign_key_constraint(:from_address_id)
|> unique_constraint(:transaction_id, name: :internal_transactions_transaction_id_index_index)
end
def extract(trace, transaction_id, %{} = timestamps) do
%{
transaction_id: transaction_id,
index: 0,
call_type: trace["action"]["callType"] || trace["type"],
to_address_hash: to_address(trace),
from_address_hash: trace |> from_address(),
trace_address: trace["traceAddress"],
value: trace["action"]["value"],
gas: trace["action"]["gas"],
gas_used: gas_used(trace),
input: trace["action"]["input"],
output: trace["result"]["output"],
# error: trace["error"],
inserted_at: Map.fetch!(timestamps, :inserted_at),
updated_at: Map.fetch!(timestamps, :updated_at)
}
end
defp from_address(%{"action" => %{"from" => address}}), do: address
defp gas_used(%{"result" => %{"gasUsed" => gas}}), do: gas
defp gas_used(%{"error" => _error}), do: 0
defp to_address(%{"action" => %{"to" => address}})
when not is_nil(address),
do: address
defp to_address(%{"result" => %{"address" => address}}), do: address
defp to_address(%{"error" => _error}), do: nil
end
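
For orientation, a hedged sketch of the decoded trace that `extract/3` above consumes. The keys mirror the lookups in the code; the addresses, transaction id, and timestamps are purely illustrative:

    now = DateTime.utc_now()
    timestamps = %{inserted_at: now, updated_at: now}

    trace = %{
      "type" => "call",
      "action" => %{
        "callType" => "call",
        "from" => "0xfrom",
        "to" => "0xto",
        "value" => 100,
        "gas" => 21_000,
        "input" => "0x"
      },
      "result" => %{"gasUsed" => 20_000, "output" => "0x"},
      "traceAddress" => []
    }

    Explorer.Chain.InternalTransaction.extract(trace, 42, timestamps)
    # => %{transaction_id: 42, index: 0, call_type: "call",
    #      from_address_hash: "0xfrom", to_address_hash: "0xto",
    #      value: 100, gas: 21_000, gas_used: 20_000, ...}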

@ -5,23 +5,27 @@ defmodule Explorer.Chain.Receipt do
alias Explorer.Chain.{Log, Transaction}
@required_attrs ~w(cumulative_gas_used gas_used status index)a
@optional_attrs ~w(transaction_id)a
@required_attrs ~w(cumulative_gas_used gas_used status index)a
@allowed_attrs @optional_attrs ++ @required_attrs
schema "receipts" do
belongs_to(:transaction, Transaction)
has_many(:logs, Log)
field(:cumulative_gas_used, :decimal)
field(:gas_used, :decimal)
field(:status, :integer)
field(:index, :integer)
belongs_to(:transaction, Transaction)
has_many(:logs, Log)
timestamps()
end
# Functions
def changeset(%__MODULE__{} = transaction_receipt, attrs \\ %{}) do
transaction_receipt
|> cast(attrs, @required_attrs)
|> cast(attrs, @optional_attrs)
|> cast(attrs, @allowed_attrs)
|> cast_assoc(:transaction)
|> cast_assoc(:logs)
|> validate_required(@required_attrs)
@ -29,5 +33,43 @@ defmodule Explorer.Chain.Receipt do
|> unique_constraint(:transaction_id)
end
def extract(raw_receipt, transaction_id, %{} = timestamps) do
logs =
raw_receipt
|> Map.fetch!("logs")
|> Enum.map(&extract_log(&1, timestamps))
receipt = %{
transaction_id: transaction_id,
index: raw_receipt["transactionIndex"],
cumulative_gas_used: raw_receipt["cumulativeGasUsed"],
gas_used: raw_receipt["gasUsed"],
status: raw_receipt["status"],
inserted_at: Map.fetch!(timestamps, :inserted_at),
updated_at: Map.fetch!(timestamps, :updated_at)
}
{receipt, logs}
end
def null, do: %__MODULE__{}
## Private Functions
defp extract_log(log, %{} = timestamps) do
# address = Address.find_or_create_by_hash(log["address"])
%{
# address_id: 0, # TODO
index: log["logIndex"],
data: log["data"],
type: log["type"],
first_topic: log["topics"] |> Enum.at(0),
second_topic: log["topics"] |> Enum.at(1),
third_topic: log["topics"] |> Enum.at(2),
fourth_topic: log["topics"] |> Enum.at(3),
inserted_at: Map.fetch!(timestamps, :inserted_at),
updated_at: Map.fetch!(timestamps, :updated_at)
}
end
end

@ -25,8 +25,7 @@ defmodule Explorer.Chain.Statistics do
@transaction_count_query """
SELECT count(transactions.id)
FROM transactions
JOIN block_transactions ON block_transactions.transaction_id = transactions.id
JOIN blocks ON blocks.id = block_transactions.block_id
JOIN blocks ON blocks.id = transactions.block_id
WHERE blocks.timestamp > NOW() - interval '1 day'
"""

@ -7,6 +7,10 @@ defmodule Explorer.Chain.Statistics.Server do
@interval 1_000
def child_spec(_) do
Supervisor.Spec.worker(__MODULE__, [true])
end
@spec fetch() :: Statistics.t()
def fetch do
case GenServer.whereis(__MODULE__) do

@ -1,20 +0,0 @@
defmodule Explorer.Chain.ToAddress do
@moduledoc false
use Explorer.Schema
alias Explorer.Chain.{Address, Transaction}
@primary_key false
schema "to_addresses" do
belongs_to(:address, Address)
belongs_to(:transaction, Transaction, primary_key: true)
timestamps()
end
def changeset(%__MODULE__{} = to_address, attrs \\ %{}) do
to_address
|> cast(attrs, [:transaction_id, :address_id])
|> unique_constraint(:transaction_id, name: :to_addresses_transaction_id_index)
end
end

@ -3,7 +3,8 @@ defmodule Explorer.Chain.Transaction do
use Explorer.Schema
alias Explorer.Chain.{Address, Block, BlockTransaction, Hash, InternalTransaction, Receipt, Wei}
alias Ecto.Changeset
alias Explorer.Chain.{Address, Block, Hash, InternalTransaction, Receipt, Wei}
# Constants
@ -76,8 +77,8 @@ defmodule Explorer.Chain.Transaction do
@type wei_per_gas :: non_neg_integer()
@typedoc """
* `block_transaction` - joins this transaction to its `block`
* `block` - the block in which this transaction was mined/validated
* `block_id` - `block` foreign key
* `from_address` - the source of `value`
* `from_address_id` - foreign key of `from_address`
* `gas` - Gas provided by the sender
@ -100,7 +101,7 @@ defmodule Explorer.Chain.Transaction do
"""
@type t :: %__MODULE__{
block: %Ecto.Association.NotLoaded{} | Block.t(),
block_transaction: %Ecto.Association.NotLoaded{} | BlockTransaction.t(),
block_id: non_neg_integer,
from_address: %Ecto.Association.NotLoaded{} | Address.t(),
from_address_id: non_neg_integer(),
gas: Gas.t(),
@ -139,8 +140,7 @@ defmodule Explorer.Chain.Transaction do
timestamps()
has_one(:block_transaction, BlockTransaction)
has_one(:block, through: [:block_transaction, :block])
belongs_to(:block, Block)
belongs_to(:from_address, Address)
has_many(:internal_transactions, InternalTransaction)
has_one(:receipt, Receipt)
@ -157,5 +157,33 @@ defmodule Explorer.Chain.Transaction do
|> unique_constraint(:hash)
end
def decode(raw_transaction, block_number, %{} = timestamps) do
attrs = %{
hash: raw_transaction["hash"],
value: raw_transaction["value"],
gas: raw_transaction["gas"],
gas_price: raw_transaction["gasPrice"],
input: raw_transaction["input"],
nonce: raw_transaction["nonce"],
public_key: raw_transaction["publicKey"],
r: raw_transaction["r"],
s: raw_transaction["s"],
standard_v: raw_transaction["standardV"],
transaction_index: raw_transaction["transactionIndex"],
v: raw_transaction["v"]
}
case changeset(%__MODULE__{}, attrs) do
%Changeset{valid?: true, changes: changes} ->
{:ok,
changes
|> Map.put(:block_number, block_number)
|> Map.merge(timestamps)}
%Changeset{valid?: false, errors: errors} ->
{:error, errors}
end
end
def null, do: %__MODULE__{}
end

@ -0,0 +1,287 @@
defmodule Explorer.ETH do
@moduledoc """
Ethereum JSONRPC client.
## Configuration
Configuration for parity URLs can be provided with the
following mix config:
config :explorer, :eth_client,
url: "https://sokol.poa.network",
trace_url: "https://sokol-trace.poa.network",
http: [recv_timeout: 60_000, timeout: 60_000, hackney: [pool: :eth]]
Note: the tracing node URL is provided separately from `:url`, via
`:trace_url`. The trace URL is used for `fetch_internal_transactions`,
which is only a supported method on tracing nodes. The `:http` option is
passed directly to the HTTP library (`HTTPoison`), which forwards the
options down to `:hackney`.
"""
require Logger
def child_spec(_opts) do
:hackney_pool.child_spec(:eth, recv_timeout: 60_000, timeout: 60_000, max_connections: 1000)
end
@doc """
Creates a filter subscription that can be polled for retrieving new blocks.
"""
def listen_for_new_blocks do
id = DateTime.utc_now() |> DateTime.to_unix()
request = %{
"id" => id,
"jsonrpc" => "2.0",
"method" => "eth_newBlockFilter",
"params" => []
}
json_rpc(request, config(:url))
end
@doc """
Lists changes for a given filter subscription.
"""
def check_for_updates(filter_id) do
request = %{
"id" => filter_id,
"jsonrpc" => "2.0",
"method" => "eth_getFilterChanges",
"params" => [filter_id]
}
json_rpc(request, config(:url))
end
@doc """
Fetches blocks by block hashes.
Transaction data is included for each block.
"""
def fetch_blocks_by_hash(block_hashes) do
batched_requests =
for block_hash <- block_hashes do
%{
"id" => block_hash,
"jsonrpc" => "2.0",
"method" => "eth_getBlockByHash",
"params" => [block_hash, true]
}
end
json_rpc(batched_requests, config(:url))
end
def decode_int(hex) do
{"0x", base_16} = String.split_at(hex, 2)
String.to_integer(base_16, 16)
end
def decode_time(field) do
field |> decode_int() |> Timex.from_unix()
end
def fetch_transaction_receipts(hashes) when is_list(hashes) do
hashes
|> Enum.map(fn hash ->
%{
"id" => hash,
"jsonrpc" => "2.0",
"method" => "eth_getTransactionReceipt",
"params" => [hash]
}
end)
|> json_rpc(config(:url))
|> handle_receipts()
end
defp handle_receipts({:ok, results}) do
results_map =
Enum.into(results, %{}, fn %{"id" => hash, "result" => receipt} ->
{hash,
Map.merge(receipt, %{
"transactionHash" => String.downcase(receipt["transactionHash"]),
"transactionIndex" => decode_int(receipt["transactionIndex"]),
"cumulativeGasUsed" => decode_int(receipt["cumulativeGasUsed"]),
"gasUsed" => decode_int(receipt["gasUsed"]),
"status" => decode_int(receipt["status"]),
"logs" =>
Enum.map(receipt["logs"], fn log ->
Map.merge(log, %{"logIndex" => decode_int(log["logIndex"])})
end)
})}
end)
{:ok, results_map}
end
defp handle_receipts({:error, reason}) do
{:error, reason}
end
def fetch_internal_transactions(hashes) when is_list(hashes) do
hashes
|> Enum.map(fn hash ->
%{
"id" => hash,
"jsonrpc" => "2.0",
"method" => "trace_replayTransaction",
"params" => [hash, ["trace"]]
}
end)
|> json_rpc(config(:trace_url))
|> handle_internal_transactions()
end
defp handle_internal_transactions({:ok, results}) do
results_map =
Enum.into(results, %{}, fn
%{"error" => error} ->
throw({:error, error})
%{"id" => hash, "result" => %{"trace" => traces}} ->
{hash, Enum.map(traces, &decode_trace(&1))}
end)
{:ok, results_map}
catch
{:error, reason} -> {:error, reason}
end
defp handle_internal_transactions({:error, reason}) do
{:error, reason}
end
defp decode_trace(%{"action" => action} = trace) do
trace
|> Map.merge(%{
"action" =>
Map.merge(action, %{
"value" => decode_int(action["value"]),
"gas" => decode_int(action["gas"])
})
})
|> put_gas_used()
end
defp put_gas_used(%{"error" => _} = trace), do: trace
defp put_gas_used(%{"result" => %{"gasUsed" => gas}} = trace) do
put_in(trace, ["result", "gasUsed"], decode_int(gas))
end
@doc """
Fetches blocks by block number range.
"""
def fetch_blocks_by_range(block_start, block_end) do
block_start
|> build_batch_get_block_by_number(block_end)
|> json_rpc(config(:url))
|> handle_get_block_by_number(block_start, block_end)
end
defp build_batch_get_block_by_number(block_start, block_end) do
for current <- block_start..block_end do
%{
"id" => current,
"jsonrpc" => "2.0",
"method" => "eth_getBlockByNumber",
"params" => [int_to_hash_string(current), true]
}
end
end
defp handle_get_block_by_number({:ok, results}, block_start, block_end) do
{blocks, next} =
Enum.reduce(results, {[], :more}, fn
%{"result" => nil}, {blocks, _} -> {blocks, :end_of_chain}
%{"result" => %{} = block}, {blocks, next} -> {[block | blocks], next}
end)
{:ok, next, decode_blocks(blocks), {block_start, block_end}}
end
defp handle_get_block_by_number({:error, reason}, block_start, block_end) do
{:error, reason, {block_start, block_end}}
end
defp decode_blocks(blocks) do
Enum.map(blocks, fn block ->
Map.merge(block, %{
"hash" => String.downcase(block["hash"]),
"number" => decode_int(block["number"]),
"gasUsed" => decode_int(block["gasUsed"]),
"timestamp" => decode_time(block["timestamp"]),
"difficulty" => decode_int(block["difficulty"]),
"totalDifficulty" => decode_int(block["totalDifficulty"]),
"size" => decode_int(block["size"]),
"gasLimit" => decode_int(block["gasLimit"]),
"transactions" => decode_transactions(block["transactions"])
})
end)
end
defp decode_transactions(transactions) do
Enum.map(transactions, fn transaction ->
Map.merge(transaction, %{
"hash" => String.downcase(transaction["hash"]),
"value" => decode_int(transaction["value"]),
"gas" => decode_int(transaction["gas"]),
"gasPrice" => decode_int(transaction["gasPrice"]),
"nonce" => decode_int(transaction["nonce"])
})
end)
end
defp json_rpc(payload, url) do
json = encode_json(payload)
headers = [{"Content-Type", "application/json"}]
case HTTPoison.post(url, json, headers, config(:http)) do
{:ok, %HTTPoison.Response{body: body, status_code: code}} ->
body |> decode_json(payload) |> handle_response(code)
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, reason}
end
end
defp handle_response(resp, 200) do
case resp do
[%{} | _] = batch_resp -> {:ok, batch_resp}
%{"error" => error} -> {:error, error}
%{"result" => result} -> {:ok, result}
end
end
defp handle_response(resp, _status) do
{:error, resp}
end
defp config(key) do
:explorer
|> Application.fetch_env!(:eth_client)
|> Keyword.fetch!(key)
end
defp encode_json(data), do: Jason.encode_to_iodata!(data)
defp decode_json(body, posted_payload) do
Jason.decode!(body)
rescue
Jason.DecodeError ->
Logger.error("""
failed to decode json payload:
#{inspect(body)}
#{inspect(posted_payload)}
""")
raise("bad jason")
end
defp int_to_hash_string(number), do: "0x" <> Integer.to_string(number, 16)
end
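
A brief usage sketch for the client above (hedged; the block numbers are arbitrary and a node is assumed to be reachable at the configured `:url`):

    # Batched eth_getBlockByNumber for blocks 0..49; on success the second element
    # is :more or :end_of_chain and hex fields come back decoded.
    {:ok, next, blocks, {0, 49}} = Explorer.ETH.fetch_blocks_by_range(0, 49)

    # Hex quantities are decoded with decode_int/1, e.g. "0x2a" -> 42.
    42 = Explorer.ETH.decode_int("0x2a")

    # Filter-based polling used by the block fetcher once the genesis pass is done:
    {:ok, filter_id} = Explorer.ETH.listen_for_new_blocks()
    {:ok, new_block_hashes} = Explorer.ETH.check_for_updates(filter_id)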

@ -1,5 +0,0 @@
defmodule Explorer.ExqNodeIdentifier do
@behaviour Exq.NodeIdentifier.Behaviour
@moduledoc "Configure Exq with the current dyno name"
def node_id, do: System.get_env("DYNO")
end

@ -1,17 +0,0 @@
defmodule Explorer.BalanceImporter do
@moduledoc "Imports a balance for a given address."
alias Explorer.{Chain, Ethereum}
def import(hash) do
encoded_balance = Ethereum.download_balance(hash)
persist_balance(hash, encoded_balance)
end
defp persist_balance(hash, encoded_balance) when is_binary(hash) do
decoded_balance = Ethereum.decode_integer_field(encoded_balance)
Chain.update_balance(hash, decoded_balance)
end
end

@ -1,81 +0,0 @@
defmodule Explorer.BlockImporter do
@moduledoc "Imports a block."
import Ecto.Query
import Ethereumex.HttpClient, only: [eth_get_block_by_number: 2]
alias Explorer.{BlockImporter, Ethereum}
alias Explorer.Chain.Block
alias Explorer.Repo.NewRelic, as: Repo
alias Explorer.Workers.ImportTransaction
def import(raw_block) when is_map(raw_block) do
changes = extract_block(raw_block)
block = changes.hash |> find()
if is_nil(block.id), do: block |> Block.changeset(changes) |> Repo.insert()
Enum.map(raw_block["transactions"], &ImportTransaction.perform/1)
end
@dialyzer {:nowarn_function, import: 1}
def import("pending") do
raw_block = download_block("pending")
Enum.map(raw_block["transactions"], &ImportTransaction.perform_later/1)
end
@dialyzer {:nowarn_function, import: 1}
def import(block_number) do
block_number |> download_block() |> BlockImporter.import()
end
def find(hash) do
query =
from(
b in Block,
where: fragment("lower(?)", b.hash) == ^String.downcase(hash),
limit: 1
)
query |> Repo.one() || %Block{}
end
@dialyzer {:nowarn_function, download_block: 1}
def download_block(block_number) do
{:ok, block} =
block_number
|> encode_number()
|> eth_get_block_by_number(true)
block
end
def extract_block(raw_block) do
%{
hash: raw_block["hash"],
number: raw_block["number"] |> Ethereum.decode_integer_field(),
gas_used: raw_block["gasUsed"] |> Ethereum.decode_integer_field(),
timestamp: raw_block["timestamp"] |> Ethereum.decode_time_field(),
parent_hash: raw_block["parentHash"],
miner: raw_block["miner"],
difficulty: raw_block["difficulty"] |> Ethereum.decode_integer_field(),
total_difficulty: raw_block["totalDifficulty"] |> Ethereum.decode_integer_field(),
size: raw_block["size"] |> Ethereum.decode_integer_field(),
gas_limit: raw_block["gasLimit"] |> Ethereum.decode_integer_field(),
nonce: raw_block["nonce"] || "0"
}
end
defp encode_number("latest"), do: "latest"
defp encode_number("earliest"), do: "earliest"
defp encode_number("pending"), do: "pending"
defp encode_number("0x" <> number) when is_binary(number), do: number
defp encode_number(number) when is_binary(number) do
number
|> String.to_integer()
|> encode_number()
end
defp encode_number(number), do: "0x" <> Integer.to_string(number, 16)
end

@ -1,80 +0,0 @@
defmodule Explorer.InternalTransactionImporter do
@moduledoc "Imports a transaction's internal transactions given its hash."
import Ecto.Query
alias Explorer.{Chain, Ethereum, EthereumexExtensions, Repo}
alias Explorer.Chain.{InternalTransaction, Transaction}
@dialyzer {:nowarn_function, import: 1}
def import(hash) do
transaction = find_transaction(hash)
hash
|> download_trace
|> extract_attrs
|> persist_internal_transactions(transaction)
end
@dialyzer {:nowarn_function, download_trace: 1}
defp download_trace(hash) do
EthereumexExtensions.trace_transaction(hash)
end
defp find_transaction(hash) do
query =
from(
t in Transaction,
where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
limit: 1
)
Repo.one!(query)
end
@dialyzer {:nowarn_function, extract_attrs: 1}
defp extract_attrs(attrs) do
trace = attrs["trace"]
trace |> Enum.with_index() |> Enum.map(&extract_trace/1)
end
def extract_trace({trace, index}) do
%{
index: index,
call_type: trace["action"]["callType"] || trace["type"],
to_address_id: trace |> to_address() |> address_id(),
from_address_id: trace |> from_address() |> address_id(),
trace_address: trace["traceAddress"],
value: trace["action"]["value"] |> Ethereum.decode_integer_field(),
gas: trace["action"]["gas"] |> Ethereum.decode_integer_field(),
gas_used: trace["result"]["gasUsed"] |> Ethereum.decode_integer_field(),
input: trace["action"]["input"],
output: trace["result"]["output"]
}
end
defp to_address(%{"action" => %{"to" => address}})
when not is_nil(address),
do: address
defp to_address(%{"result" => %{"address" => address}}), do: address
defp from_address(%{"action" => %{"from" => address}}), do: address
@dialyzer {:nowarn_function, persist_internal_transactions: 2}
defp persist_internal_transactions(traces, transaction) do
Enum.map(traces, fn trace ->
trace = Map.merge(trace, %{transaction_id: transaction.id})
%InternalTransaction{}
|> InternalTransaction.changeset(trace)
|> Repo.insert()
end)
end
defp address_id(hash) do
{:ok, address} = Chain.ensure_hash_address(hash)
address.id
end
end

@ -1,79 +0,0 @@
defmodule Explorer.ReceiptImporter do
@moduledoc "Imports a transaction receipt given a transaction hash."
import Ecto.Query
import Ethereumex.HttpClient, only: [eth_get_transaction_receipt: 1]
alias Explorer.{Chain, Repo}
alias Explorer.Chain.{Receipt, Transaction}
def import(hash) do
transaction = hash |> find_transaction()
hash
|> download_receipt()
|> extract_receipt()
|> Map.put(:transaction_id, transaction.id)
|> save_receipt()
end
@dialyzer {:nowarn_function, download_receipt: 1}
defp download_receipt(hash) do
{:ok, receipt} = eth_get_transaction_receipt(hash)
receipt || %{}
end
defp find_transaction(hash) do
query =
from(
transaction in Transaction,
left_join: receipt in assoc(transaction, :receipt),
where: fragment("lower(?)", transaction.hash) == ^hash,
where: is_nil(receipt.id),
limit: 1
)
Repo.one(query) || Transaction.null()
end
defp save_receipt(receipt) do
unless is_nil(receipt.transaction_id) do
%Receipt{}
|> Receipt.changeset(receipt)
|> Repo.insert()
end
end
defp extract_receipt(receipt) do
logs = receipt["logs"] || []
%{
index: receipt["transactionIndex"] |> decode_integer_field(),
cumulative_gas_used: receipt["cumulativeGasUsed"] |> decode_integer_field(),
gas_used: receipt["gasUsed"] |> decode_integer_field(),
status: receipt["status"] |> decode_integer_field(),
logs: logs |> Enum.map(&extract_log/1)
}
end
defp extract_log(log) do
{:ok, address} = Chain.ensure_hash_address(log["address"])
%{
address_id: address.id,
index: log["logIndex"] |> decode_integer_field(),
data: log["data"],
type: log["type"],
first_topic: log["topics"] |> Enum.at(0),
second_topic: log["topics"] |> Enum.at(1),
third_topic: log["topics"] |> Enum.at(2),
fourth_topic: log["topics"] |> Enum.at(3)
}
end
defp decode_integer_field("0x" <> hex) when is_binary(hex) do
String.to_integer(hex, 16)
end
defp decode_integer_field(field), do: field
end

@ -1,142 +0,0 @@
defmodule Explorer.TransactionImporter do
@moduledoc "Imports a transaction given a unique hash."
import Ecto.Query
import Ethereumex.HttpClient, only: [eth_get_transaction_by_hash: 1]
alias Explorer.{Chain, Ethereum, Repo, BalanceImporter}
alias Explorer.Chain.{Block, BlockTransaction, Transaction}
def import(hash) when is_binary(hash) do
hash |> download_transaction() |> persist_transaction()
end
def import(raw_transaction) when is_map(raw_transaction) do
persist_transaction(raw_transaction)
end
def persist_transaction(raw_transaction) do
found_transaction = raw_transaction["hash"] |> find()
transaction =
case is_nil(found_transaction.id) do
false ->
found_transaction
true ->
to_address =
raw_transaction
|> to_address()
|> fetch_address()
from_address =
raw_transaction
|> from_address()
|> fetch_address()
changes =
raw_transaction
|> extract_attrs()
|> Map.put(:to_address_id, to_address.id)
|> Map.put(:from_address_id, from_address.id)
found_transaction |> Transaction.changeset(changes) |> Repo.insert!()
end
transaction
|> create_block_transaction(raw_transaction["blockHash"])
refresh_account_balances(raw_transaction)
transaction
end
def find(hash) do
query =
from(
t in Transaction,
where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
limit: 1
)
query |> Repo.one() || %Transaction{}
end
def download_transaction(hash) do
{:ok, payload} = eth_get_transaction_by_hash(hash)
payload
end
def extract_attrs(raw_transaction) do
%{
hash: raw_transaction["hash"],
value: raw_transaction["value"] |> Ethereum.decode_integer_field(),
gas: raw_transaction["gas"] |> Ethereum.decode_integer_field(),
gas_price: raw_transaction["gasPrice"] |> Ethereum.decode_integer_field(),
input: raw_transaction["input"],
nonce: raw_transaction["nonce"] |> Ethereum.decode_integer_field(),
public_key: raw_transaction["publicKey"],
r: raw_transaction["r"],
s: raw_transaction["s"],
standard_v: raw_transaction["standardV"],
transaction_index: raw_transaction["transactionIndex"],
v: raw_transaction["v"]
}
end
def create_block_transaction(transaction, hash) do
query =
from(
t in Block,
where: fragment("lower(?)", t.hash) == ^String.downcase(hash),
limit: 1
)
block = query |> Repo.one()
if block do
changes = %{block_id: block.id, transaction_id: transaction.id}
case Repo.get_by(BlockTransaction, transaction_id: transaction.id) do
nil ->
%BlockTransaction{}
|> BlockTransaction.changeset(changes)
|> Repo.insert()
block_transaction ->
block_transaction
|> BlockTransaction.changeset(%{block_id: block.id})
|> Repo.update()
end
end
transaction
end
def to_address(%{"to" => to}) when not is_nil(to), do: to
def to_address(%{"creates" => creates}) when not is_nil(creates), do: creates
def to_address(hash) when is_bitstring(hash), do: hash
def from_address(%{"from" => from}), do: from
def from_address(hash) when is_bitstring(hash), do: hash
def fetch_address(hash) when is_bitstring(hash) do
{:ok, address} = Chain.ensure_hash_address(hash)
address
end
defp refresh_account_balances(raw_transaction) do
raw_transaction
|> to_address()
|> update_balance()
raw_transaction
|> from_address()
|> update_balance()
end
defp update_balance(address_hash) do
BalanceImporter.import(address_hash)
end
end

@ -0,0 +1,38 @@
defmodule Explorer.Indexer do
@moduledoc """
TODO
"""
alias Explorer.Chain
alias Explorer.Chain.Block
def child_spec(opts) do
%{
id: __MODULE__,
start: {Explorer.Indexer.Supervisor, :start_link, [opts]},
restart: :permanent,
shutdown: 5000,
type: :supervisor
}
end
@doc """
TODO
"""
def last_indexed_block_number do
case Chain.get_latest_block() do
%Block{number: num} -> num
nil -> 0
end
end
@doc """
TODO
"""
def next_block_number do
case last_indexed_block_number() do
0 -> 0
num -> num + 1
end
end
end
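
These helpers are what let a restart resume from the last stored block (the first bullet of the commit message). A small sketch of the intended behaviour, assuming the database states described in the comments:

    # Empty database (Chain.get_latest_block/0 returns nil): start from genesis.
    0 = Explorer.Indexer.next_block_number()

    # Blocks persisted up to number 41: resume one past the last indexed block.
    41 = Explorer.Indexer.last_indexed_block_number()
    42 = Explorer.Indexer.next_block_number()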

@ -0,0 +1,212 @@
defmodule Explorer.Indexer.BlockFetcher do
@moduledoc """
TODO
## Next steps
- after genesis index, transition to the real-time (RT) index
"""
use GenServer
require Logger
alias Explorer.{Chain, ETH, Indexer}
alias Explorer.Indexer.Sequence
defstruct ~w(current_block genesis_task subscription_id)a
@batch_size 50
@blocks_concurrency 10
@receipts_batch_size 250
@receipts_concurrency 10
@internal_batch_size 50
@internal_concurrency 4
@polling_interval 20_000
@doc """
Ensures missing block number ranges are chunked into fetchable batches.
"""
def missing_block_numbers do
{count, missing_ranges} = Chain.missing_block_numbers()
chunked_ranges =
Enum.flat_map(missing_ranges, fn
{start, ending} when ending - start <= @batch_size ->
[{start, ending}]
{start, ending} ->
start
|> Stream.iterate(&(&1 + @batch_size))
|> Enum.reduce_while([], fn
chunk_start, acc when chunk_start + @batch_size >= ending ->
{:halt, [{chunk_start, ending} | acc]}
chunk_start, acc ->
{:cont, [{chunk_start, chunk_start + @batch_size - 1} | acc]}
end)
|> Enum.reverse()
end)
{count, chunked_ranges}
end
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(_opts) do
send(self(), :index)
:timer.send_interval(15_000, self(), :debug_count)
{:ok, %__MODULE__{current_block: 0, genesis_task: nil, subscription_id: nil}}
end
def handle_info(:index, state) do
{count, missing_ranges} = missing_block_numbers()
current_block = Indexer.next_block_number()
Logger.debug(fn -> "#{count} missed block ranges between genesis and #{current_block}" end)
{:ok, genesis_task} =
Task.start_link(fn ->
stream_import(missing_ranges, current_block)
end)
Process.monitor(genesis_task)
{:noreply, %__MODULE__{state | genesis_task: genesis_task}}
end
def handle_info(:poll, %__MODULE__{subscription_id: subscription_id} = state) do
Process.send_after(self(), :poll, @polling_interval)
with {:ok, blocks} when length(blocks) > 0 <- ETH.check_for_updates(subscription_id) do
Logger.debug(fn -> "Processing #{length(blocks)} new block(s)" end)
# TODO do something with the new blocks
ETH.fetch_blocks_by_hash(blocks)
end
{:noreply, state}
end
def handle_info({:DOWN, _ref, :process, pid, :normal}, %__MODULE__{genesis_task: pid} = state) do
Logger.info(fn -> "Finished index from genesis" end)
{:ok, subscription_id} = ETH.listen_for_new_blocks()
send(self(), :poll)
{:noreply, %__MODULE__{state | genesis_task: nil, subscription_id: subscription_id}}
end
def handle_info(:debug_count, state) do
Logger.debug(fn ->
"""
================================
persisted counts
================================
blocks: #{Chain.block_count()}
internal transactions: #{Chain.internal_transaction_count()}
receipts: #{Chain.receipt_count()}
logs: #{Chain.log_count()}
"""
end)
{:noreply, state}
end
defp stream_import(missing_ranges, current_block) do
{:ok, seq} = Sequence.start_link(missing_ranges, current_block, @batch_size)
seq
|> Sequence.build_stream()
|> Task.async_stream(
fn {block_start, block_end} = range ->
with {:ok, next, blocks, range} <- ETH.fetch_blocks_by_range(block_start, block_end),
:ok <- cap_seq(seq, next, range),
transaction_hashes <- collect_transaction_hashes(blocks),
{:ok, receipts} <- fetch_transaction_receipts(transaction_hashes),
{:ok, internals} <- fetch_internal_transactions(transaction_hashes) do
import_blocks(blocks, internals, receipts, seq, range)
else
{:error, reason} ->
Logger.debug(fn ->
"failed to fetch blocks #{inspect(range)}: #{inspect(reason)}. Retrying"
end)
:ok = Sequence.inject_range(seq, range)
end
end,
max_concurrency: @blocks_concurrency,
timeout: :infinity
)
|> Enum.each(fn {:ok, :ok} -> :ok end)
end
defp cap_seq(seq, :end_of_chain, {_block_start, block_end}) do
Logger.info("Reached end of blockchain #{inspect(block_end)}")
:ok = Sequence.cap(seq)
end
defp cap_seq(_seq, :more, {block_start, block_end}) do
Logger.debug(fn -> "got blocks #{block_start} - #{block_end}" end)
:ok
end
defp fetch_transaction_receipts([]), do: {:ok, %{}}
defp fetch_transaction_receipts(hashes) do
Logger.debug(fn -> "fetching #{length(hashes)} transaction receipts" end)
stream_opts = [max_concurrency: @receipts_concurrency, timeout: :infinity]
hashes
|> Enum.chunk_every(@receipts_batch_size)
|> Task.async_stream(&ETH.fetch_transaction_receipts(&1), stream_opts)
|> Enum.reduce_while({:ok, %{}}, fn
{:ok, {:ok, receipts}}, {:ok, acc} -> {:cont, {:ok, Map.merge(acc, receipts)}}
{:ok, {:error, reason}}, {:ok, _acc} -> {:halt, {:error, reason}}
{:error, reason}, {:ok, _acc} -> {:halt, {:error, reason}}
end)
end
defp fetch_internal_transactions([]), do: {:ok, %{}}
defp fetch_internal_transactions(hashes) do
Logger.debug(fn -> "fetching #{length(hashes)} internal transactions" end)
stream_opts = [max_concurrency: @internal_concurrency, timeout: :infinity]
hashes
|> Enum.chunk_every(@internal_batch_size)
|> Task.async_stream(&ETH.fetch_internal_transactions(&1), stream_opts)
|> Enum.reduce_while({:ok, %{}}, fn
{:ok, {:ok, trans}}, {:ok, acc} -> {:cont, {:ok, Map.merge(acc, trans)}}
{:ok, {:error, reason}}, {:ok, _acc} -> {:halt, {:error, reason}}
{:error, reason}, {:ok, _acc} -> {:halt, {:error, reason}}
end)
end
defp import_blocks(blocks, internal_transactions, receipts, seq, range) do
case Chain.import_blocks(blocks, internal_transactions, receipts) do
{:ok, _results} ->
:ok
{:error, step, reason, _changes} ->
Logger.debug(fn ->
"failed to insert blocks during #{step} #{inspect(range)}: #{inspect(reason)}. Retrying"
end)
:ok = Sequence.inject_range(seq, range)
end
end
defp collect_transaction_hashes(raw_blocks) do
Enum.flat_map(raw_blocks, fn %{"transactions" => transactions} ->
Enum.map(transactions, fn %{"hash" => hash} -> hash end)
end)
end
end
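
A worked example of how `missing_block_numbers/0` above chunks a large gap with `@batch_size 50` (the gap is illustrative):

    # Suppose Chain.missing_block_numbers/0 reports {121, [{0, 120}]}.
    # The 121-block gap is split into batch-sized ranges:
    #
    #   {0, 120}  ->  [{0, 49}, {50, 99}, {100, 120}]
    #
    # so missing_block_numbers/0 returns {121, [{0, 49}, {50, 99}, {100, 120}]},
    # and each range becomes one batched eth_getBlockByNumber call in stream_import/2.

Once the genesis task exits, `handle_info(:poll, ...)` above re-checks the block filter every 20 seconds (`@polling_interval`), which is the periodic check for new blocks referenced in the commit message.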

@ -0,0 +1,80 @@
defmodule Explorer.Indexer.Sequence do
@moduledoc false
use Agent
defstruct ~w(current mode queue step)a
@type range :: {pos_integer(), pos_integer()}
@doc """
Starts a process for managing a block sequence.
"""
@spec start_link([range()], pos_integer(), pos_integer()) :: Agent.on_start()
def start_link(initial_ranges, range_start, step) do
Agent.start_link(fn ->
%__MODULE__{
current: range_start,
step: step,
mode: :infinite,
queue: :queue.from_list(initial_ranges)
}
end)
end
@doc """
Adds a range of block numbers to the sequence.
"""
@spec inject_range(pid(), range()) :: :ok
def inject_range(sequencer, {_first, _last} = range) when is_pid(sequencer) do
Agent.update(sequencer, fn state ->
%__MODULE__{state | queue: :queue.in(range, state.queue)}
end)
end
@doc """
Changes the sequencer mode to `:finite`, signaling that no new ranges should be generated once the queued ranges are drained.
"""
@spec cap(pid()) :: :ok
def cap(sequencer) when is_pid(sequencer) do
Agent.update(sequencer, fn state ->
%__MODULE__{state | mode: :finite}
end)
end
@doc """
Builds an enumerable stream using a sequencer agent.
"""
@spec build_stream(pid()) :: Enumerable.t()
def build_stream(sequencer) when is_pid(sequencer) do
Stream.resource(
fn -> sequencer end,
fn seq ->
case pop(seq) do
:halt -> {:halt, seq}
range -> {[range], seq}
end
end,
fn seq -> seq end
)
end
@doc """
Pops the next block range from the sequence.
"""
@spec pop(pid()) :: range() | :halt
def pop(sequencer) when is_pid(sequencer) do
Agent.get_and_update(sequencer, fn %__MODULE__{current: current, step: step} = state ->
case {state.mode, :queue.out(state.queue)} do
{_, {{:value, {starting, ending}}, new_queue}} ->
{{starting, ending}, %__MODULE__{state | queue: new_queue}}
{:infinite, {:empty, new_queue}} ->
{{current, current + step - 1}, %__MODULE__{state | current: current + step, queue: new_queue}}
{:finite, {:empty, new_queue}} ->
{:halt, %__MODULE__{state | queue: new_queue}}
end
end)
end
end
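A minimal usage sketch of the sequence API (illustration only, not part of the changeset): with one injected backfill range, the stream yields the queued range first and then step-sized ranges starting at `range_start`.

# Sketch: the queued range {0, 9} is served first, then ranges of 10 beginning at 100
{:ok, seq} = Explorer.Indexer.Sequence.start_link([{0, 9}], 100, 10)

seq
|> Explorer.Indexer.Sequence.build_stream()
|> Enum.take(3)
#=> [{0, 9}, {100, 109}, {110, 119}]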

@ -0,0 +1,19 @@
defmodule Explorer.Indexer.Supervisor do
@moduledoc """
Supervises the fetchers for `Explorer.Indexer`.
"""
alias Explorer.Indexer.BlockFetcher
def start_link(opts) do
Supervisor.start_link(__MODULE__, opts)
end
def init(_opts) do
children = [
{BlockFetcher, []}
]
Supervisor.init(children, strategy: :one_for_one)
end
end

@ -1,7 +1,6 @@
defmodule Explorer.Repo do
use Ecto.Repo, otp_app: :explorer
use Scrivener, page_size: 10
@dialyzer {:nowarn_function, rollback: 1}
@doc """
Dynamically loads the repository url from the
@ -11,11 +10,24 @@ defmodule Explorer.Repo do
{:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
end
defmodule NewRelic do
use NewRelixir.Plug.Repo, repo: Explorer.Repo
@doc """
Chunks elements into multiple `insert_all/3` calls to stay under the database driver's parameter limit.
*Note:* should always be run inside a transaction, since multiple inserts may occur.
"""
def safe_insert_all(kind, elements, opts) do
returning = opts[:returning]
elements
|> Enum.chunk_every(1000)
|> Enum.reduce({0, []}, fn chunk, {total_count, acc} ->
{count, inserted} = insert_all(kind, chunk, opts)
def paginate(queryable, opts \\ []) do
Explorer.Repo.paginate(queryable, opts)
end
if returning do
{count + total_count, acc ++ inserted}
else
{count + total_count, nil}
end
end)
end
end
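A usage sketch for `safe_insert_all/3` (illustrative only; `rows` and the `Explorer.Chain.Block` schema are assumed inputs), wrapped in a transaction as the note above recommends.

Explorer.Repo.transaction(fn ->
  # rows: a list of maps matching the schema's fields (assumed for illustration)
  {count, nil} = Explorer.Repo.safe_insert_all(Explorer.Chain.Block, rows, on_conflict: :nothing)
  count
end)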

@ -1,4 +0,0 @@
defmodule Explorer.Scheduler do
@moduledoc false
use Quantum.Scheduler, otp_app: :explorer
end

@ -1,21 +0,0 @@
defmodule Explorer.SkippedBalances do
@moduledoc "Gets a list of Addresses that do not have balances."
alias Explorer.Chain.Address
alias Explorer.Repo.NewRelic, as: Repo
import Ecto.Query, only: [from: 2]
def fetch(count) do
query =
from(
address in Address,
select: address.hash,
where: is_nil(address.balance),
limit: ^count
)
query
|> Repo.all()
end
end

@ -1,32 +0,0 @@
defmodule Explorer.SkippedBlocks do
@moduledoc """
Fill in older blocks that were skipped during processing.
"""
import Ecto.Query, only: [from: 2, limit: 2]
alias Explorer.Chain.Block
alias Explorer.Repo.NewRelic, as: Repo
@missing_number_query "SELECT generate_series(?, 0, -1) AS missing_number"
def first, do: first(1)
def first(count) do
blocks =
from(
b in Block,
right_join: fragment(@missing_number_query, ^latest_block_number()),
on: b.number == fragment("missing_number"),
select: fragment("missing_number::text"),
where: is_nil(b.id),
limit: ^count
)
Repo.all(blocks)
end
def latest_block_number do
block = Repo.one(Block |> Block.latest() |> limit(1)) || Block.null()
block.number
end
end

@ -1,25 +0,0 @@
defmodule Explorer.SkippedInternalTransactions do
@moduledoc """
Find transactions that do not have internal transactions.
"""
import Ecto.Query, only: [from: 2]
alias Explorer.Chain.Transaction
alias Explorer.Repo.NewRelic, as: Repo
def first, do: first(1)
def first(count) do
transactions =
from(
transaction in Transaction,
left_join: internal_transactions in assoc(transaction, :internal_transactions),
select: fragment("hash"),
group_by: transaction.id,
having: count(internal_transactions.id) == 0,
limit: ^count
)
Repo.all(transactions)
end
end

@ -1,25 +0,0 @@
defmodule Explorer.SkippedReceipts do
@moduledoc """
Find transactions that do not have a receipt.
"""
import Ecto.Query, only: [from: 2]
alias Explorer.Chain.Transaction
alias Explorer.Repo.NewRelic, as: Repo
def first, do: first(1)
def first(count) do
transactions =
from(
transaction in Transaction,
left_join: receipt in assoc(transaction, :receipt),
select: fragment("hash"),
group_by: transaction.id,
having: count(receipt.id) == 0,
limit: ^count
)
Repo.all(transactions)
end
end

@ -1,13 +0,0 @@
defmodule Explorer.Workers.ImportBalance do
@moduledoc "A worker that imports the balance for a given address."
alias Explorer.BalanceImporter
def perform(hash) do
BalanceImporter.import(hash)
end
def perform_later(hash) do
Exq.enqueue(Exq.Enqueuer, "balances", __MODULE__, [hash])
end
end

@ -1,26 +0,0 @@
defmodule Explorer.Workers.ImportBlock do
@moduledoc "Imports blocks by web3 conventions."
import Ethereumex.HttpClient, only: [eth_block_number: 0]
alias Explorer.BlockImporter
@dialyzer {:nowarn_function, perform: 1}
def perform("latest") do
case eth_block_number() do
{:ok, number} -> perform_later(number)
_ -> nil
end
end
@dialyzer {:nowarn_function, perform: 1}
def perform(number), do: BlockImporter.import("#{number}")
def perform_later("0x" <> number) when is_binary(number) do
number |> String.to_integer(16) |> perform_later()
end
def perform_later(number) do
Exq.enqueue(Exq.Enqueuer, "blocks", __MODULE__, [number])
end
end

@ -1,12 +0,0 @@
defmodule Explorer.Workers.ImportInternalTransaction do
@moduledoc "Imports internal transactions via Parity trace endpoints."
alias Explorer.InternalTransactionImporter
@dialyzer {:nowarn_function, perform: 1}
def perform(hash), do: InternalTransactionImporter.import(hash)
def perform_later(hash) do
Exq.enqueue(Exq.Enqueuer, "internal_transactions", __MODULE__, [hash])
end
end

@ -1,12 +0,0 @@
defmodule Explorer.Workers.ImportReceipt do
@moduledoc "Imports transaction by web3 conventions."
alias Explorer.ReceiptImporter
@dialyzer {:nowarn_function, perform: 1}
def perform(hash), do: ReceiptImporter.import(hash)
def perform_later(hash) do
Exq.enqueue(Exq.Enqueuer, "receipts", __MODULE__, [hash])
end
end

@ -1,18 +0,0 @@
defmodule Explorer.Workers.ImportSkippedBlocks do
alias Explorer.SkippedBlocks
alias Explorer.Workers.ImportBlock
@moduledoc "Imports skipped blocks."
def perform, do: perform(1)
def perform(count) do
count |> SkippedBlocks.first() |> Enum.map(&ImportBlock.perform_later/1)
end
def perform_later, do: perform_later(1)
def perform_later(count) do
Exq.enqueue(Exq.Enqueuer, "default", __MODULE__, [count])
end
end

@ -1,26 +0,0 @@
defmodule Explorer.Workers.ImportTransaction do
@moduledoc """
Manages the lifecycle of importing a single Transaction from web3.
"""
alias Explorer.TransactionImporter
alias Explorer.Workers.{ImportInternalTransaction, ImportReceipt}
@dialyzer {:nowarn_function, perform: 1}
def perform(hash) when is_binary(hash) do
TransactionImporter.import(hash)
ImportInternalTransaction.perform_later(hash)
ImportReceipt.perform_later(hash)
end
@dialyzer {:nowarn_function, perform: 1}
def perform(raw_transaction) when is_map(raw_transaction) do
TransactionImporter.import(raw_transaction)
ImportInternalTransaction.perform_later(raw_transaction["hash"])
ImportReceipt.perform_later(raw_transaction["hash"])
end
def perform_later(hash) do
Exq.enqueue(Exq.Enqueuer, "transactions", __MODULE__, [hash])
end
end

@ -1,29 +0,0 @@
defmodule Explorer.Workers.RefreshBalance do
@moduledoc """
Refreshes the Credit and Debit balance views.
"""
alias Ecto.Adapters.SQL
alias Explorer.Chain.{Credit, Debit}
alias Explorer.Repo
def perform("credit"), do: unless(refreshing("credits"), do: Credit.refresh())
def perform("debit"), do: unless(refreshing("debits"), do: Debit.refresh())
def perform do
perform_later(["credit"])
perform_later(["debit"])
end
def perform_later(args \\ []) do
Exq.enqueue(Exq.Enqueuer, "default", __MODULE__, args)
end
def refreshing(table) do
query = "REFRESH MATERIALIZED VIEW CONCURRENTLY #{table}%"
result = SQL.query!(Repo, "SELECT TRUE FROM pg_stat_activity WHERE query ILIKE '$#{query}'", [])
Enum.count(result.rows) > 0
end
end

@ -1,25 +0,0 @@
defmodule Mix.Tasks.Exq.Start do
@moduledoc "Starts the Exq worker"
use Mix.Task
alias Explorer.{Repo, Scheduler}
def run(["scheduler"]) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
Exq.start_link(mode: :enqueuer)
Scheduler.start_link()
:timer.sleep(:infinity)
end
def run(_) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
Exq.start_link(mode: :default)
:timer.sleep(:infinity)
end
end

@ -1,24 +0,0 @@
defmodule Mix.Tasks.Scrape.Balances do
@moduledoc "Populate Address balances."
use Mix.Task
alias Explorer.{BalanceImporter, Repo, SkippedBalances}
def run([]), do: run(1)
def run(count) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
Exq.start_link(mode: :enqueuer)
"#{count}"
|> String.to_integer()
|> SkippedBalances.fetch()
|> Flow.from_enumerable()
|> Flow.map(&BalanceImporter.import/1)
|> Enum.to_list()
end
end

@ -1,26 +0,0 @@
defmodule Mix.Tasks.Scrape.Blocks do
@moduledoc "Scrapes blocks from web3"
use Mix.Task
alias Explorer.{BlockImporter, Repo, SkippedBlocks}
def run([]), do: run(1)
def run(count) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
Exq.start_link(mode: :enqueuer)
"#{count}"
|> String.to_integer()
|> SkippedBlocks.first()
|> Enum.shuffle()
|> Flow.from_enumerable()
|> Flow.map(&BlockImporter.download_block/1)
|> Flow.map(&BlockImporter.import/1)
|> Enum.to_list()
end
end

@ -1,24 +0,0 @@
defmodule Mix.Tasks.Scrape.InternalTransactions do
@moduledoc "Backfill Internal Transactions via Parity Trace."
use Mix.Task
alias Explorer.{InternalTransactionImporter, Repo, SkippedInternalTransactions}
def run([]), do: run(1)
def run(count) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
"#{count}"
|> String.to_integer()
|> SkippedInternalTransactions.first()
|> Enum.shuffle()
|> Flow.from_enumerable()
|> Flow.map(&InternalTransactionImporter.import/1)
|> Enum.to_list()
end
end

@ -1,24 +0,0 @@
defmodule Mix.Tasks.Scrape.Receipts do
@moduledoc "Scrapes blocks from web3"
use Mix.Task
alias Explorer.{ReceiptImporter, Repo, SkippedReceipts}
def run([]), do: run(1)
def run(count) do
[:postgrex, :ecto, :ethereumex, :tzdata]
|> Enum.each(&Application.ensure_all_started/1)
Repo.start_link()
"#{count}"
|> String.to_integer()
|> SkippedReceipts.first()
|> Enum.shuffle()
|> Flow.from_enumerable()
|> Flow.map(&ReceiptImporter.import/1)
|> Enum.to_list()
end
end

@ -46,9 +46,8 @@ defmodule Explorer.Mixfile do
defp elixirc_paths, do: ["lib"]
# Specifies extra applications to start per environment
defp extra_applications(:prod), do: [:phoenix_pubsub_redis, :exq, :exq_ui | extra_applications()]
defp extra_applications(:prod), do: [:phoenix_pubsub_redis | extra_applications()]
defp extra_applications(:dev), do: [:exq, :exq_ui | extra_applications()]
defp extra_applications(_), do: extra_applications()
defp extra_applications,
@ -57,7 +56,6 @@ defmodule Explorer.Mixfile do
:ethereumex,
:logger,
:mix,
:new_relixir,
:runtime_tools,
:scrivener_ecto,
:timex,
@ -77,19 +75,15 @@ defmodule Explorer.Mixfile do
{:ex_machina, "~> 2.1", only: [:test]},
# Code coverage
{:excoveralls, "~> 0.8.1", only: [:test]},
{:exq, "~> 0.9.1"},
{:exq_ui, "~> 0.9.0"},
{:exvcr, "~> 0.10", only: :test},
{:flow, "~> 0.12"},
{:httpoison, "~> 1.0", override: true},
{:jason, "~> 1.0"},
{:jiffy, "~> 0.15.1"},
{:junit_formatter, ">= 0.0.0", only: [:test], runtime: false},
{:math, "~> 0.3.0"},
{:mock, "~> 0.3.0", only: [:test], runtime: false},
{:mox, "~> 0.3.2", only: [:test]},
{:new_relixir, "~> 0.4"},
{:postgrex, ">= 0.0.0"},
{:quantum, "~> 2.2.1"},
{:scrivener_ecto, "~> 1.0"},
{:scrivener_html, "~> 1.7"},
{:sobelow, ">= 0.0.0", only: [:dev, :test], runtime: false},

@ -3,10 +3,13 @@ defmodule Explorer.Repo.Migrations.CreateAddress do
def change do
create table(:addresses) do
add :balance, :numeric, precision: 100
add :balance_updated_at, :utc_datetime
add :hash, :string, null: false
timestamps null: false
end
create unique_index(:addresses, ["(lower(hash))"], name: :addresses_hash_index)
create unique_index(:addresses, [:hash])
end
end

@ -3,21 +3,23 @@ defmodule Explorer.Repo.Migrations.CreateBlocks do
def change do
create table(:blocks) do
add :number, :bigint, null: false
add :hash, :string, null: false
add :parent_hash, :string, null: false
add :nonce, :string, null: false
add :miner, :string, null: false
add :difficulty, :numeric, precision: 50
add :total_difficulty, :numeric, precision: 50
add :size, :integer, null: false
add :gas_limit, :integer, null: false
add :gas_used, :integer, null: false
add :hash, :string, null: false
add :miner, :string, null: false
add :nonce, :string, null: false
add :number, :bigint, null: false
add :parent_hash, :string, null: false
add :size, :integer, null: false
add :timestamp, :utc_datetime, null: false
add :total_difficulty, :numeric, precision: 50
timestamps null: false
end
create unique_index(:blocks, ["(lower(hash))"], name: :blocks_hash_index)
create index(:blocks, [:number])
create index(:blocks, [:timestamp])
create unique_index(:blocks, [:hash])
create unique_index(:blocks, [:number])
end
end

@ -1,14 +0,0 @@
defmodule Explorer.Repo.Migrations.CreateTransactions do
use Ecto.Migration
def change do
create table(:transactions) do
add :hash, :string, null: false
add :block_id, references(:blocks), null: false
timestamps null: false
end
create unique_index(:transactions, ["(lower(hash))"], name: :transactions_hash_index)
create index(:transactions, [:block_id])
end
end

@ -0,0 +1,37 @@
defmodule Explorer.Repo.Migrations.CreateTransactions do
use Ecto.Migration
def change do
create table(:transactions) do
# Fields
add :gas, :numeric, precision: 100, null: false
add :gas_price, :numeric, precision: 100, null: false
add :hash, :string, null: false
add :input, :text, null: false
add :nonce, :integer, null: false
add :public_key, :string, null: false
add :r, :string, null: false
add :s, :string, null: false
add :standard_v, :string, null: false
add :transaction_index, :string, null: false
add :v, :string, null: false
add :value, :numeric, precision: 100, null: false
timestamps null: false
# Foreign Keys
# null when the transaction is pending
add :block_id, references(:blocks, on_delete: :delete_all), null: true
add :from_address_id, references(:addresses, on_delete: :delete_all)
add :to_address_id, references(:addresses, on_delete: :delete_all)
end
create index(:transactions, :block_id)
create index(:transactions, :from_address_id)
create unique_index(:transactions, [:hash])
create index(:transactions, :inserted_at)
create index(:transactions, :to_address_id)
create index(:transactions, :updated_at)
end
end

@ -1,9 +0,0 @@
defmodule Explorer.Repo.Migrations.AddValueToTransactions do
use Ecto.Migration
def change do
alter table(:transactions) do
add :value, :numeric, precision: 100, null: false
end
end
end

@ -1,18 +0,0 @@
defmodule Explorer.Repo.Migrations.AddFieldsToTransactions do
use Ecto.Migration
def change do
alter table(:transactions) do
add :gas, :numeric, precision: 100, null: false
add :gas_price, :numeric, precision: 100, null: false
add :input, :text, null: false
add :nonce, :integer, null: false
add :public_key, :string, null: false
add :r, :string, null: false
add :s, :string, null: false
add :standard_v, :string, null: false
add :transaction_index, :string, null: false
add :v, :string, null: false
end
end
end

@ -1,14 +0,0 @@
defmodule Explorer.Repo.Migrations.CreateFromAddresses do
use Ecto.Migration
def change do
create table(:from_addresses, primary_key: false) do
add :transaction_id, references(:transactions), null: false, primary_key: true
add :address_id, references(:addresses), null: false
timestamps null: false
end
create index(:from_addresses, :transaction_id, unique: true)
create index(:from_addresses, :address_id)
end
end

@ -1,14 +0,0 @@
defmodule Explorer.Repo.Migrations.CreateToAddresses do
use Ecto.Migration
def change do
create table(:to_addresses, primary_key: false) do
add :transaction_id, references(:transactions), null: false, primary_key: true
add :address_id, references(:addresses), null: false
timestamps null: false
end
create index(:to_addresses, :transaction_id, unique: true)
create index(:to_addresses, :address_id)
end
end

@ -1,14 +0,0 @@
defmodule Explorer.Repo.Migrations.CreateBlockTransactions do
use Ecto.Migration
def change do
create table(:block_transactions, primary_key: false) do
add :block_id, references(:blocks)
add :transaction_id, references(:transactions), primary_key: true
timestamps null: false
end
create unique_index(:block_transactions, :transaction_id)
create unique_index(:block_transactions, [:block_id, :transaction_id])
end
end

@ -1,9 +0,0 @@
defmodule Explorer.Repo.Migrations.RemoveBlockIdFromTransactions do
use Ecto.Migration
def change do
alter table(:transactions) do
remove :block_id
end
end
end

@ -1,8 +0,0 @@
defmodule Explorer.Repo.Migrations.AddIndicesToBlockAndBlockTransaction do
use Ecto.Migration
def change do
create index(:block_transactions, :block_id)
create index(:blocks, :timestamp)
end
end

@ -1,8 +0,0 @@
defmodule Explorer.Repo.Migrations.AddTransactionsIndexToTimestamps do
use Ecto.Migration
def change do
create index(:transactions, :inserted_at)
create index(:transactions, :updated_at)
end
end

@ -3,12 +3,17 @@ defmodule Explorer.Repo.Migrations.CreateReceipts do
def change do
create table(:receipts) do
add :transaction_id, references(:transactions), null: false
add :cumulative_gas_used, :numeric, precision: 100, null: false
add :gas_used, :numeric, precision: 100, null: false
add :status, :integer, null: false
add :index, :integer, null: false
add :status, :integer, null: false
timestamps null: false
# Foreign keys
add :receipt_id, references(:receipts, on_delete: :delete_all), null: true
add :transaction_id, references(:transactions, on_delete: :delete_all), null: false
end
create index(:receipts, :index)

@ -3,25 +3,30 @@ defmodule Explorer.Repo.Migrations.CreateLogs do
def change do
create table(:logs) do
add :receipt_id, references(:receipts), null: false
add :address_id, references(:addresses), null: false
add :index, :integer, null: false
add :data, :text, null: false
add :index, :integer, null: false
add :type, :string, null: false
add :first_topic, :string, null: true
add :second_topic, :string, null: true
add :third_topic, :string, null: true
add :fourth_topic, :string, null: true
timestamps null: false
# Foreign Keys
# TODO used in views, but not in indexer
add :address_id, references(:addresses, on_delete: :delete_all), null: true
add :receipt_id, references(:receipts, on_delete: :delete_all), null: false
end
create index(:logs, :address_id)
create index(:logs, :index)
create index(:logs, :type)
create index(:logs, :first_topic)
create index(:logs, :second_topic)
create index(:logs, :third_topic)
create index(:logs, :fourth_topic)
create index(:logs, :address_id)
create unique_index(:logs, [:receipt_id, :index])
end
end

@ -1,48 +0,0 @@
defmodule Explorer.Repo.Migrations.CreateBalancesViews do
use Ecto.Migration
def up do
execute """
CREATE MATERIALIZED VIEW credits AS
SELECT addresses.id AS address_id,
COALESCE(SUM(transactions.value), 0) AS value,
COUNT(to_addresses.address_id) AS count,
COALESCE(MIN(transactions.inserted_at), NOW()) AS inserted_at,
COALESCE(MAX(transactions.inserted_at), NOW()) AS updated_at
FROM addresses
INNER JOIN to_addresses ON to_addresses.address_id = addresses.id
INNER JOIN transactions ON transactions.id = to_addresses.transaction_id
INNER JOIN receipts ON receipts.transaction_id = transactions.id AND receipts.status = 1
GROUP BY addresses.id
;
"""
execute """
CREATE MATERIALIZED VIEW debits AS
SELECT addresses.id AS address_id,
COALESCE(SUM(transactions.value), 0) AS value,
COUNT(from_addresses.address_id) AS count,
COALESCE(MIN(transactions.inserted_at), NOW()) AS inserted_at,
COALESCE(MAX(transactions.inserted_at), NOW()) AS updated_at
FROM addresses
INNER JOIN from_addresses ON from_addresses.address_id = addresses.id
INNER JOIN transactions ON transactions.id = from_addresses.transaction_id
INNER JOIN receipts ON receipts.transaction_id = transactions.id AND receipts.status = 1
GROUP BY addresses.id
;
"""
create unique_index(:credits, :address_id)
create index(:credits, :inserted_at)
create index(:credits, :updated_at)
create unique_index(:debits, :address_id)
create index(:debits, :inserted_at)
create index(:debits, :updated_at)
end
def down do
execute "DROP MATERIALIZED VIEW credits;"
execute "DROP MATERIALIZED VIEW debits;"
end
end

@ -3,22 +3,30 @@ defmodule Explorer.Repo.Migrations.CreateInternalTransactions do
def change do
create table(:internal_transactions) do
add :transaction_id, references(:transactions), null: false
add :to_address_id, references(:addresses), null: false
add :from_address_id, references(:addresses), null: false
add :index, :integer, null: false
add :call_type, :string, null: false
add :trace_address, {:array, :integer}, null: false
add :value, :numeric, precision: 100, null: false
add :from_address_hash, :string
add :gas, :numeric, precision: 100, null: false
add :gas_used, :numeric, precision: 100, null: false
add :input, :string
add :output, :string
add :index, :integer, null: false
add :input, :text
add :output, :text
add :to_address_hash, :string
add :trace_address, {:array, :integer}, null: false
add :value, :numeric, precision: 100, null: false
timestamps null: false
# Foreign keys
# TODO used in views, but not in indexer
add :from_address_id, references(:addresses, on_delete: :delete_all), null: true
# TODO used in views, but not in indexer
add :to_address_id, references(:addresses, on_delete: :delete_all), null: true
add :transaction_id, references(:transactions, on_delete: :delete_all), null: false
end
create index(:internal_transactions, :transaction_id)
create index(:internal_transactions, :to_address_id)
create index(:internal_transactions, :from_address_id)
create index(:internal_transactions, :to_address_hash)
create index(:internal_transactions, :from_address_hash)
end
end

@ -1,12 +0,0 @@
defmodule Explorer.Repo.Migrations.MoveAddressKeysToTransactions do
use Ecto.Migration
def change do
alter table(:transactions) do
add :to_address_id, references(:addresses)
add :from_address_id, references(:addresses)
end
end
end

@ -1,8 +0,0 @@
defmodule Explorer.Repo.Migrations.IndexTransactionAddressIds do
use Ecto.Migration
def change do
create index(:transactions, :to_address_id)
create index(:transactions, :from_address_id)
end
end

@ -1,38 +0,0 @@
defmodule Explorer.Repo.Migrations.DedupInternalTransactions do
use Ecto.Migration
def up do
execute "SELECT DISTINCT ON (transaction_id, index) * INTO internal_transactions_dedup FROM internal_transactions;"
execute "DROP TABLE internal_transactions;"
execute "ALTER TABLE internal_transactions_dedup RENAME TO internal_transactions;"
execute "CREATE SEQUENCE internal_transactions_id_seq OWNED BY internal_transactions.id;"
execute """
ALTER TABLE internal_transactions
ALTER COLUMN id SET DEFAULT nextval('internal_transactions_id_seq'),
ALTER COLUMN id SET NOT NULL,
ALTER COLUMN transaction_id SET NOT NULL,
ALTER COLUMN to_address_id SET NOT NULL,
ALTER COLUMN from_address_id SET NOT NULL,
ALTER COLUMN index SET NOT NULL,
ALTER COLUMN call_type SET NOT NULL,
ALTER COLUMN trace_address SET NOT NULL,
ALTER COLUMN value SET NOT NULL,
ALTER COLUMN gas SET NOT NULL,
ALTER COLUMN gas_used SET NOT NULL,
ALTER COLUMN inserted_at SET NOT NULL,
ALTER COLUMN updated_at SET NOT NULL,
ADD FOREIGN KEY (from_address_id) REFERENCES addresses(id),
ADD FOREIGN KEY (to_address_id) REFERENCES addresses(id),
ADD FOREIGN KEY (transaction_id) REFERENCES transactions(id);
"""
execute "ALTER TABLE internal_transactions ADD PRIMARY KEY (id);"
execute "CREATE INDEX internal_transactions_from_address_id_index ON internal_transactions (from_address_id);"
execute "CREATE INDEX internal_transactions_to_address_id_index ON internal_transactions (to_address_id);"
execute "CREATE INDEX internal_transactions_transaction_id_index ON internal_transactions (transaction_id);"
execute "CREATE UNIQUE INDEX internal_transactions_transaction_id_index_index ON internal_transactions (transaction_id, index);"
end
def down do
execute "DROP INDEX internal_transactions_transaction_id_index_index"
end
end

@ -1,10 +0,0 @@
defmodule Explorer.Repo.Migrations.AddBalanceAndBalanceUpdatedAtToAddress do
use Ecto.Migration
def change do
alter table(:addresses) do
add :balance, :numeric, precision: 100
add :balance_updated_at, :utc_datetime
end
end
end

@ -1,9 +0,0 @@
defmodule Explorer.Repo.Migrations.AddReceiptIdToTransactions do
use Ecto.Migration
def change do
alter table("transactions") do
add :receipt_id, :bigint
end
end
end

@ -1,18 +0,0 @@
defmodule Explorer.Chain.BlockTransactionTest do
use Explorer.DataCase
alias Explorer.Chain.BlockTransaction
describe "changeset/2" do
test "with empty attributes" do
changeset = BlockTransaction.changeset(%BlockTransaction{}, %{})
refute(changeset.valid?)
end
test "with valid attributes" do
attrs = %{block_id: 4, transaction_id: 3}
changeset = BlockTransaction.changeset(%BlockTransaction{}, attrs)
assert(changeset.valid?)
end
end
end

@ -1,13 +0,0 @@
defmodule Explorer.Chain.FromAddressTest do
use Explorer.DataCase
alias Explorer.Chain.FromAddress
describe "changeset/2" do
test "with valid attributes" do
params = params_for(:from_address)
changeset = FromAddress.changeset(%FromAddress{}, params)
assert changeset.valid?
end
end
end

@ -47,10 +47,8 @@ defmodule Explorer.Chain.StatisticsTest do
last_week = Timex.shift(time, days: -8)
block = insert(:block, timestamp: time)
old_block = insert(:block, timestamp: last_week)
transaction = insert(:transaction)
old_transaction = insert(:transaction)
insert(:block_transaction, block: block, transaction: transaction)
insert(:block_transaction, block: old_block, transaction: old_transaction)
insert(:transaction, block_id: block.id)
insert(:transaction, block_id: old_block.id)
assert %Statistics{transaction_count: 1} = Statistics.fetch()
end
@ -100,12 +98,7 @@ defmodule Explorer.Chain.StatisticsTest do
test "returns the last five transactions with blocks" do
block = insert(:block)
6
|> insert_list(:transaction)
|> Enum.map(fn transaction ->
insert(:block_transaction, block: block, transaction: transaction)
end)
insert_list(6, :transaction, block_id: block.id)
statistics = Statistics.fetch()

@ -1,13 +0,0 @@
defmodule Explorer.Chain.ToAddressTest do
use Explorer.DataCase
alias Explorer.Chain.ToAddress
describe "changeset/2" do
test "with valid attributes" do
params = params_for(:to_address)
changeset = ToAddress.changeset(%ToAddress{}, params)
assert changeset.valid?
end
end
end

@ -26,9 +26,8 @@ defmodule Explorer.ChainTest do
end
test "with transactions" do
block = %Block{id: block_id} = insert(:block)
%Transaction{id: transaction_id} = insert(:transaction)
insert(:block_transaction, block_id: block_id, transaction_id: transaction_id)
block = insert(:block)
%Transaction{id: transaction_id} = insert(:transaction, block_id: block.id)
assert %Scrivener.Page{
entries: [%Transaction{id: ^transaction_id}],
@ -40,17 +39,10 @@ defmodule Explorer.ChainTest do
test "with transaction with receipt required without receipt does not return transaction" do
block = %Block{id: block_id} = insert(:block)
%Transaction{id: transaction_id_with_receipt} = insert(:transaction)
%Transaction{id: transaction_id_with_receipt} = insert(:transaction, block_id: block_id)
insert(:receipt, transaction_id: transaction_id_with_receipt)
insert(:block_transaction, block_id: block_id, transaction_id: transaction_id_with_receipt)
%Transaction{id: transaction_id_without_receipt} = insert(:transaction)
insert(
:block_transaction,
block_id: block_id,
transaction_id: transaction_id_without_receipt
)
%Transaction{id: transaction_id_without_receipt} = insert(:transaction, block_id: block_id)
assert %Scrivener.Page{
entries: [%Transaction{id: ^transaction_id_with_receipt, receipt: %Receipt{}}],
@ -84,18 +76,14 @@ defmodule Explorer.ChainTest do
end
test "with transactions can be paginated" do
block = %Block{id: block_id} = insert(:block)
block = insert(:block)
transactions = insert_list(2, :transaction)
Enum.each(transactions, fn %Transaction{id: transaction_id} ->
insert(:block_transaction, block_id: block_id, transaction_id: transaction_id)
end)
transactions = insert_list(2, :transaction, block_id: block.id)
[%Transaction{id: first_transaction_id}, %Transaction{id: second_transaction_id}] = transactions
assert %Scrivener.Page{
entries: [%Transaction{id: ^first_transaction_id}],
entries: [%Transaction{id: ^second_transaction_id}],
page_number: 1,
page_size: 1,
total_entries: 2,
@ -103,7 +91,7 @@ defmodule Explorer.ChainTest do
} = Chain.block_to_transactions(block, pagination: %{page_size: 1})
assert %Scrivener.Page{
entries: [%Transaction{id: ^second_transaction_id}],
entries: [%Transaction{id: ^first_transaction_id}],
page_number: 2,
page_size: 1,
total_entries: 2,
@ -121,8 +109,7 @@ defmodule Explorer.ChainTest do
test "with transactions" do
block = insert(:block)
%Transaction{id: transaction_id} = insert(:transaction)
insert(:block_transaction, block_id: block.id, transaction_id: transaction_id)
insert(:transaction, block_id: block.id)
assert Chain.block_to_transaction_count(block) == 1
end

@ -1,14 +0,0 @@
defmodule Explorer.EthereumexExtensionsTest do
use Explorer.DataCase
alias Explorer.EthereumexExtensions
describe "trace_transaction/1" do
test "returns a transaction trace" do
use_cassette "ethereumex_extensions_trace_transaction_1" do
hash = "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
result = EthereumexExtensions.trace_transaction(hash)
assert(is_list(result["trace"]))
end
end
end
end

@ -1,40 +0,0 @@
defmodule Explorer.BalanceImporterTest do
use Explorer.DataCase
alias Explorer.{Chain, BalanceImporter}
alias Explorer.Chain.Address
describe "import/1" do
test "it updates the balance for an address" do
insert(:address, hash: "0x5cc18cc34175d358ff8e19b7f98566263c4106a0", balance: 5)
BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
expected_balance = Decimal.new(1_572_374_181_095_000_000)
assert {:ok, %Address{balance: ^expected_balance}} =
Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
end
test "it updates the balance update time for an address" do
insert(
:address,
hash: "0x5cc18cc34175d358ff8e19b7f98566263c4106a0",
balance_updated_at: nil
)
BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
assert {:ok, %Address{balance_updated_at: balance_updated_at}} =
Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
refute is_nil(balance_updated_at)
end
test "it creates an address if one does not exist" do
BalanceImporter.import("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
assert {:ok, _} = Chain.hash_to_address("0x5cc18cc34175d358ff8e19b7f98566263c4106a0")
end
end
end

@ -1,121 +0,0 @@
defmodule Explorer.BlockImporterTest do
use Explorer.DataCase
import Mock
alias Explorer.BlockImporter
alias Explorer.Chain.{Block, Transaction}
alias Explorer.Workers.ImportTransaction
describe "import/1" do
test "imports and saves a block to the database" do
use_cassette "block_importer_import_1_saves_the_block" do
with_mock ImportTransaction, perform: fn _ -> {:ok} end do
BlockImporter.import("0xc4f0d")
block = Block |> order_by(desc: :inserted_at) |> Repo.one()
assert block.hash == "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e"
end
end
end
test "when a block with the same hash is imported it does not update the block" do
use_cassette "block_importer_import_1_duplicate_block" do
with_mock ImportTransaction, perform: fn hash -> insert(:transaction, hash: hash) end do
insert(
:block,
hash: "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e",
gas_limit: 5
)
BlockImporter.import("0xc4f0d")
block =
Repo.get_by(
Block,
hash: "0x16cb43ccfb7875c14eb3f03bdc098e4af053160544270594fa429d256cbca64e"
)
assert block.gas_limit == 5
assert Block |> Repo.all() |> Enum.count() == 1
end
end
end
end
describe "import/1 pending" do
test "does not create a block" do
use_cassette "block_importer_import_1_pending" do
with_mock ImportTransaction, perform_later: fn _ -> {:ok} end do
BlockImporter.import("pending")
assert Block |> Repo.all() |> Enum.count() == 0
end
end
end
test "when a block with the same hash is imported does not create a block" do
use_cassette "block_importer_import_1_pending" do
with_mock ImportTransaction, perform_later: fn _ -> insert(:transaction) end do
BlockImporter.import("pending")
assert Transaction |> Repo.all() |> Enum.count() != 0
end
end
end
end
describe "find/1" do
test "returns an empty block when there is no block with the given hash" do
assert BlockImporter.find("0xC001") == %Block{}
end
test "returns the block with the requested hash" do
block = insert(:block, hash: "0xBEA75")
assert BlockImporter.find("0xBEA75").id == block.id
end
end
describe "download_block/1" do
test "downloads the block" do
use_cassette "block_importer_download_block_1_downloads_the_block" do
raw_block = BlockImporter.download_block("0xc4f0d")
assert raw_block
end
end
end
describe "extract_block/1" do
test "extracts the block attributes" do
extracted_block =
BlockImporter.extract_block(%{
"difficulty" => "0xfffffffffffffffffffffffffffffffe",
"gasLimit" => "0x02",
"gasUsed" => "0x19522",
"hash" => "bananas",
"miner" => "0xdb1207770e0a4258d7a4ce49ab037f92564fea85",
"number" => "0x7f2fb",
"parentHash" => "0x70029f66ea5a3b2b1ede95079d95a2ab74b649b5b17cdcf6f29b6317e7c7efa6",
"size" => "0x10",
"timestamp" => "0x12",
"totalDifficulty" => "0xff",
"nonce" => "0xfb6e1a62d119228b",
"transactions" => []
})
assert(
extracted_block == %{
difficulty: 340_282_366_920_938_463_463_374_607_431_768_211_454,
gas_limit: 2,
gas_used: 103_714,
hash: "bananas",
nonce: "0xfb6e1a62d119228b",
miner: "0xdb1207770e0a4258d7a4ce49ab037f92564fea85",
number: 520_955,
parent_hash: "0x70029f66ea5a3b2b1ede95079d95a2ab74b649b5b17cdcf6f29b6317e7c7efa6",
size: 16,
timestamp: Timex.parse!("1970-01-01T00:00:18-00:00", "{ISO:Extended}"),
total_difficulty: 255
}
)
end
end
end

@ -1,115 +0,0 @@
defmodule Explorer.InternalTransactionImporterTest do
use Explorer.DataCase
alias Explorer.Chain.InternalTransaction
alias Explorer.InternalTransactionImporter
describe "import/1" do
test "imports and saves an internal transaction to the database" do
use_cassette "internal_transaction_importer_import_1" do
transaction =
insert(
:transaction,
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
)
InternalTransactionImporter.import(transaction.hash)
internal_transactions = InternalTransaction |> Repo.all()
assert length(internal_transactions) == 2
end
end
test "imports internal transactions with ordered indexes" do
use_cassette "internal_transaction_importer_import_1" do
transaction =
insert(
:transaction,
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
)
InternalTransactionImporter.import(transaction.hash)
last_internal_transaction = InternalTransaction |> order_by(desc: :index) |> limit(1) |> Repo.one()
assert last_internal_transaction.index == 1
end
end
test "imports an internal transaction that creates a contract" do
use_cassette "internal_transaction_importer_import_1_with_contract_creation" do
transaction =
insert(
:transaction,
hash: "0x27d64b8e8564d2852c88767e967b88405c99341509cd3a3504fd67a65277116d"
)
InternalTransactionImporter.import(transaction.hash)
last_internal_transaction = InternalTransaction |> order_by(desc: :index) |> limit(1) |> Repo.one()
assert last_internal_transaction.call_type == "create"
end
end
test "subsequent imports do not create duplicate internal transactions" do
use_cassette "internal_transaction_importer_import_1" do
transaction =
insert(
:transaction,
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
)
InternalTransactionImporter.import(transaction.hash)
InternalTransactionImporter.import(transaction.hash)
internal_transactions = InternalTransaction |> Repo.all()
assert length(internal_transactions) == 2
end
end
test "import fails if a transaction with the hash doesn't exist" do
hash = "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
assert_raise Ecto.NoResultsError, fn -> InternalTransactionImporter.import(hash) end
end
end
describe "extract_trace" do
test "maps attributes to database record attributes when the trace is a call" do
trace = %{
"action" => %{
"callType" => "call",
"from" => "0xba9f067abbc4315ece8eb33e7a3d01030bb368ef",
"gas" => "0x4821f",
"input" => "0xd1f276d3",
"to" => "0xe213402e637565bb9de0651827517e7554693f53",
"value" => "0x0"
},
"result" => %{
"gasUsed" => "0x4e4",
"output" => "0x000000000000000000000000ba9f067abbc4315ece8eb33e7a3d01030bb368ef"
},
"subtraces" => 0,
"traceAddress" => [2, 0],
"type" => "call"
}
to_address = insert(:address, hash: "0xe213402e637565bb9de0651827517e7554693f53")
from_address = insert(:address, hash: "0xba9f067abbc4315ece8eb33e7a3d01030bb368ef")
assert(
InternalTransactionImporter.extract_trace({trace, 2}) == %{
index: 2,
to_address_id: to_address.id,
from_address_id: from_address.id,
call_type: "call",
trace_address: [2, 0],
value: 0,
gas: 295_455,
gas_used: 1252,
input: "0xd1f276d3",
output: "0x000000000000000000000000ba9f067abbc4315ece8eb33e7a3d01030bb368ef"
}
)
end
end
end

@ -1,119 +0,0 @@
defmodule Explorer.ReceiptImporterTest do
use Explorer.DataCase
alias Explorer.Chain.{Log, Receipt}
alias Explorer.ReceiptImporter
describe "import/1" do
test "saves a receipt to the database" do
transaction =
insert(
:transaction,
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291"
)
use_cassette "transaction_importer_import_1_receipt" do
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
receipt = Receipt |> preload([:transaction]) |> Repo.one()
assert receipt.transaction == transaction
end
end
test "saves a receipt log" do
insert(
:transaction,
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291"
)
use_cassette "transaction_importer_import_1_receipt" do
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
receipt = Receipt |> preload([:transaction]) |> Repo.one()
log = Log |> preload(receipt: :transaction) |> Repo.one()
assert log.receipt == receipt
end
end
test "saves a receipt log for an address" do
insert(
:transaction,
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291"
)
address = insert(:address, hash: "0x353fe3ffbf77edef7f9c352c47965a38c07e837c")
use_cassette "transaction_importer_import_1_receipt" do
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
log = Log |> preload([:address]) |> Repo.one()
assert log.address == address
end
end
test "saves a receipt for a failed transaction" do
insert(
:transaction,
hash: "0x2532864dc2e0d0bc2dfabf4685c0c03dbdbe9cf67ebc593fc82d41087ab71435"
)
use_cassette "transaction_importer_import_1_failed" do
ReceiptImporter.import("0x2532864dc2e0d0bc2dfabf4685c0c03dbdbe9cf67ebc593fc82d41087ab71435")
receipt = Repo.one(Receipt)
assert receipt.status == 0
end
end
test "saves a receipt for a transaction that ran out of gas" do
insert(
:transaction,
hash: "0x702e518267b0a57e4cb44b9db100afe4d7115f2d2650466a8c376f3dbb77eb35"
)
use_cassette "transaction_importer_import_1_out_of_gas" do
ReceiptImporter.import("0x702e518267b0a57e4cb44b9db100afe4d7115f2d2650466a8c376f3dbb77eb35")
receipt = Repo.one(Receipt)
assert receipt.status == 0
end
end
test "does not import a receipt for a transaction that already has one" do
transaction =
insert(
:transaction,
hash: "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291"
)
insert(:receipt, transaction: transaction)
use_cassette "transaction_importer_import_1_receipt" do
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
assert Repo.all(Receipt) |> Enum.count() == 1
end
end
test "does not import a receipt for a nonexistent transaction" do
use_cassette "transaction_importer_import_1_receipt" do
ReceiptImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
assert Repo.all(Receipt) |> Enum.count() == 0
end
end
test "does not process a forever-pending receipt" do
insert(
:transaction,
hash: "0xde791cfcde3900d4771e5fcf8c11dc305714118df7aa7e42f84576e64dbf6246"
)
use_cassette "transaction_importer_import_1_pending" do
ReceiptImporter.import("0xde791cfcde3900d4771e5fcf8c11dc305714118df7aa7e42f84576e64dbf6246")
assert Repo.all(Receipt) |> Enum.count() == 0
end
end
end
end

@ -1,270 +0,0 @@
defmodule Explorer.TransactionImporterTest do
use Explorer.DataCase
alias Explorer.Chain.{Address, BlockTransaction, Transaction}
alias Explorer.TransactionImporter
@raw_transaction %{
"creates" => nil,
"hash" => "pepino",
"value" => "0xde0b6b3a7640000",
"from" => "0x34d0ef2c",
"gas" => "0x21000",
"gasPrice" => "0x10000",
"input" => "0x5c8eff12",
"nonce" => "0x31337",
"publicKey" => "0xb39af9c",
"r" => "0x9",
"s" => "0x10",
"to" => "0x7a33b7d",
"standardV" => "0x11",
"transactionIndex" => "0x12",
"v" => "0x13"
}
@processed_transaction %{
hash: "pepino",
value: 1_000_000_000_000_000_000,
gas: 135_168,
gas_price: 65536,
input: "0x5c8eff12",
nonce: 201_527,
public_key: "0xb39af9c",
r: "0x9",
s: "0x10",
standard_v: "0x11",
transaction_index: "0x12",
v: "0x13"
}
describe "import/1" do
test "imports and saves a transaction to the database" do
use_cassette "transaction_importer_import_saves_the_transaction" do
TransactionImporter.import("0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291")
transaction = Transaction |> order_by(desc: :inserted_at) |> Repo.one()
assert transaction.hash == "0xdc3a0dfd0bbffd5eabbe40fb13afbe35ac5f5c030bff148f3e50afe32974b291"
end
end
test "when the transaction has previously been saved does not update it" do
use_cassette "transaction_importer_updates_the_association" do
insert(
:transaction,
hash: "0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23",
gas: 5
)
TransactionImporter.import("0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23")
transaction = Transaction |> order_by(desc: :inserted_at) |> Repo.one()
assert transaction.gas == Decimal.new(5)
end
end
test "binds an association to an existing block" do
use_cassette "transaction_importer_saves_the_association" do
block =
insert(
:block,
hash: "0xfce13392435a8e7dab44c07d482212efb9dc39a9bea1915a9ead308b55a617f9"
)
TransactionImporter.import("0x64d851139325479c3bb7ccc6e6ab4cde5bc927dce6810190fe5d770a4c1ac333")
transaction =
Transaction
|> Repo.get_by(hash: "0x64d851139325479c3bb7ccc6e6ab4cde5bc927dce6810190fe5d770a4c1ac333")
block_transaction = BlockTransaction |> Repo.get_by(transaction_id: transaction.id)
assert block_transaction.block_id == block.id
end
end
test "when there is no block it does not save a block transaction" do
use_cassette "transaction_importer_txn_without_block" do
TransactionImporter.import("0xc6aa189827c14880f012a65292f7add7b5310094f8773a3d85b66303039b9dcf")
transaction =
Transaction
|> Repo.get_by(hash: "0xc6aa189827c14880f012a65292f7add7b5310094f8773a3d85b66303039b9dcf")
block_transaction = BlockTransaction |> Repo.get_by(transaction_id: transaction.id)
refute block_transaction
end
end
test "creates a from address" do
use_cassette "transaction_importer_creates_a_from_address" do
TransactionImporter.import("0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d")
transaction =
Transaction
|> Repo.get_by(hash: "0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d")
address = Address |> Repo.get_by(hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785")
assert transaction.from_address_id == address.id
end
end
test "binds an existing from address" do
insert(:address, hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785")
use_cassette "transaction_importer_creates_a_from_address" do
TransactionImporter.import("0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d")
transaction =
Transaction
|> Repo.get_by(hash: "0xc445f5410912458c480d992dd93355ae3dad64d9f65db25a3cf43a9c609a2e0d")
address = Address |> Repo.get_by(hash: "0xa5b4b372112ab8dbbb48c8d0edd89227e24ec785")
assert transaction.from_address_id == address.id
end
end
test "creates a to address" do
use_cassette "transaction_importer_creates_a_to_address" do
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
transaction =
Transaction
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8")
assert transaction.to_address_id == address.id
end
end
test "binds an existing to address" do
insert(:address, hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8")
use_cassette "transaction_importer_creates_a_to_address" do
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
transaction =
Transaction
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8")
assert(transaction.to_address_id == address.id)
end
end
test "creates a to address using creates when to is nil" do
use_cassette "transaction_importer_creates_a_to_address_from_creates" do
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
transaction =
Transaction
|> Repo.get_by(hash: "0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8")
assert(transaction.to_address_id == address.id)
end
end
test "processes a map of transaction attributes" do
insert(:block, hash: "0xtakis")
TransactionImporter.import(Map.merge(@raw_transaction, %{"hash" => "0xmunchos", "blockHash" => "0xtakis"}))
last_transaction = Transaction |> order_by(desc: :inserted_at) |> limit(1) |> Repo.one()
assert last_transaction.hash == "0xmunchos"
end
test "gets balances for addresses" do
TransactionImporter.import("0xdc533d4227734a7cacd75a069e8dc57ac571b865ed97bae5ea4cb74b54145f4c")
from_address = Address |> Repo.get_by(hash: "0xb2867180771b196518651c174c9240d5e8bd0ecd")
to_address = Address |> Repo.get_by(hash: "0x24e5b8528fe83257d5fe3497ef616026713347f8")
assert(from_address.balance == Decimal.new(1_572_374_181_095_000_000))
assert(to_address.balance == Decimal.new(1_572_374_181_095_000_000))
end
end
describe "find/1" do
test "returns an empty transaction when there is no transaction with the given hash" do
assert TransactionImporter.find("0xC001") == %Transaction{}
end
test "returns the transaction with the requested hash" do
transaction = insert(:transaction, hash: "0xBEA75")
assert TransactionImporter.find("0xBEA75").id == transaction.id
end
end
describe "download_transaction/1" do
test "downloads a transaction" do
use_cassette "transaction_importer_download_transaction" do
raw_transaction =
TransactionImporter.download_transaction("0x170baac4eca26076953370dd603c68eab340c0135b19b585010d3158a5dbbf23")
assert(raw_transaction["from"] == "0xbe96ef1d056c97323e210fd0dd818aa027e57143")
end
end
test "when it has an invalid hash" do
use_cassette "transaction_importer_download_transaction_with_a_bad_hash" do
assert_raise MatchError, fn ->
TransactionImporter.download_transaction("0xdecafisbadzzzz")
end
end
end
end
describe "extract_attrs/1" do
test "returns a changeset-friendly list of transaction attributes" do
transaction_attrs = TransactionImporter.extract_attrs(@raw_transaction)
assert transaction_attrs == @processed_transaction
end
end
describe "create_block_transaction/2" do
test "inserts a block transaction" do
block = insert(:block)
transaction = insert(:transaction)
TransactionImporter.create_block_transaction(transaction, block.hash)
block_transaction =
BlockTransaction
|> Repo.get_by(transaction_id: transaction.id, block_id: block.id)
assert block_transaction
end
test "updates an already existing block transaction" do
block = insert(:block)
transaction = insert(:transaction)
the_seventies = Timex.parse!("1970-01-01T00:00:18-00:00", "{ISO:Extended}")
block_transaction =
insert(:block_transaction, %{
block_id: block.id,
transaction_id: transaction.id,
inserted_at: the_seventies,
updated_at: the_seventies
})
update_block = insert(:block)
TransactionImporter.create_block_transaction(transaction, update_block.hash)
updated_block_transaction =
BlockTransaction
|> Repo.get_by(transaction_id: transaction.id)
refute block_transaction.block_id == updated_block_transaction.block_id
refute block_transaction.updated_at == updated_block_transaction.updated_at
end
end
end

@ -0,0 +1,93 @@
defmodule Explorer.Indexer.SequenceTest do
use ExUnit.Case
alias Explorer.Indexer.Sequence
test "start_link" do
{:ok, pid} = Sequence.start_link([{1, 4}], 5, 1)
assert state(pid) == %Sequence{
current: 5,
mode: :infinite,
queue: {[{1, 4}], []},
step: 1
}
end
test "inject_range" do
{:ok, pid} = Sequence.start_link([{1, 2}], 5, 1)
assert :ok = Sequence.inject_range(pid, {3, 4})
assert state(pid) == %Sequence{
current: 5,
mode: :infinite,
queue: {[{3, 4}], [{1, 2}]},
step: 1
}
end
test "cap" do
{:ok, pid} = Sequence.start_link([{1, 2}], 5, 1)
assert :ok = Sequence.cap(pid)
assert state(pid).mode == :finite
end
describe "pop" do
test "with a non-empty queue in finite and infinite modes" do
{:ok, pid} = Sequence.start_link([{1, 4}, {6, 9}], 99, 5)
assert {1, 4} == Sequence.pop(pid)
assert state(pid) == %Sequence{
current: 99,
mode: :infinite,
queue: {[], [{6, 9}]},
step: 5
}
:ok = Sequence.cap(pid)
assert {6, 9} == Sequence.pop(pid)
assert state(pid) == %Sequence{
current: 99,
mode: :finite,
queue: {[], []},
step: 5
}
end
test "with an empty queue in infinite mode" do
{:ok, pid} = Sequence.start_link([], 5, 5)
assert {5, 9} == Sequence.pop(pid)
assert state(pid) == %Sequence{
current: 10,
mode: :infinite,
queue: {[], []},
step: 5
}
end
test "with an empty queue in finite mode" do
{:ok, pid} = Sequence.start_link([], 5, 5)
:ok = Sequence.cap(pid)
assert :halt == Sequence.pop(pid)
assert state(pid) == %Sequence{
current: 5,
mode: :finite,
queue: {[], []},
step: 5
}
end
end
defp state(sequencer) do
Agent.get(sequencer, & &1)
end
end

@ -1,18 +0,0 @@
defmodule Explorer.SkippedBalancesTest do
use Explorer.DataCase
alias Explorer.SkippedBalances
describe "fetch/1" do
test "returns a list of address hashes that do not have balances" do
insert(:address, hash: "0xcashews", balance: nil)
assert SkippedBalances.fetch(1) == ["0xcashews"]
end
test "only get a limited set of addresses" do
insert_list(10, :address, balance: nil)
insert_list(5, :address, balance: 55)
assert length(SkippedBalances.fetch(7)) == 7
end
end
end

@ -1,77 +0,0 @@
defmodule Explorer.SkippedBlocksTest do
use Explorer.DataCase
alias Explorer.SkippedBlocks
describe "first/0 when there are no blocks" do
test "returns no blocks" do
assert SkippedBlocks.first() == []
end
end
describe "first/0 when there are no skipped blocks" do
test "returns no blocks" do
insert(:block, %{number: 0})
assert SkippedBlocks.first() == []
end
end
describe "first/0 when a block has been skipped" do
test "returns the first skipped block number" do
insert(:block, %{number: 0})
insert(:block, %{number: 2})
assert SkippedBlocks.first() == ["1"]
end
end
describe "first/1 when there are no blocks" do
test "returns no blocks" do
assert SkippedBlocks.first(1) == []
end
end
describe "first/1 when there are no skipped blocks" do
test "returns no blocks" do
insert(:block, %{number: 0})
assert SkippedBlocks.first(1) == []
end
end
describe "first/1 when a block has been skipped" do
test "returns the skipped block number" do
insert(:block, %{number: 1})
assert SkippedBlocks.first(1) == ["0"]
end
test "returns up to the requested number of skipped block numbers in reverse order" do
insert(:block, %{number: 1})
insert(:block, %{number: 3})
assert SkippedBlocks.first(1) == ["2"]
end
test "returns only the skipped block number" do
insert(:block, %{number: 1})
assert SkippedBlocks.first(100) == ["0"]
end
test "returns all the skipped block numbers in random order" do
insert(:block, %{number: 1})
insert(:block, %{number: 3})
block_ids = SkippedBlocks.first(100)
assert("2" in block_ids and "0" in block_ids)
end
end
describe "latest_block_number/0 when there are no blocks" do
test "returns -1" do
assert SkippedBlocks.latest_block_number() == -1
end
end
describe "latest_block_number/0 when there is a block" do
test "returns the number of the block" do
insert(:block, %{number: 1})
assert SkippedBlocks.latest_block_number() == 1
end
end
end

@ -1,26 +0,0 @@
defmodule Explorer.SkippedInternalTransactionsTest do
use Explorer.DataCase
alias Explorer.SkippedInternalTransactions
describe "first/0 when there are no transactions" do
test "returns no transaction hashes" do
assert SkippedInternalTransactions.first() == []
end
end
describe "first/0 when there are transactions with internal transactions" do
test "returns no transaction hashes" do
transaction = insert(:transaction)
insert(:internal_transaction, transaction: transaction)
assert SkippedInternalTransactions.first() == []
end
end
describe "first/0 when there are transactions with no internal transactions" do
test "returns the transaction hash" do
insert(:transaction, hash: "0xdeadbeef")
assert SkippedInternalTransactions.first() == ["0xdeadbeef"]
end
end
end

@ -1,54 +0,0 @@
defmodule Explorer.SkippedReceiptsTest do
use Explorer.DataCase
alias Explorer.SkippedReceipts
describe "first/0 when there are no transactions" do
test "returns no transactions" do
assert SkippedReceipts.first() == []
end
end
describe "first/0 when there are no skipped transactions" do
test "returns no transactions" do
transaction = insert(:transaction)
insert(:receipt, transaction: transaction)
assert SkippedReceipts.first() == []
end
end
describe "first/0 when a transaction has been skipped" do
test "returns the first skipped transaction hash" do
insert(:transaction, %{hash: "0xBEE75"})
assert SkippedReceipts.first() == ["0xBEE75"]
end
end
describe "first/1 when there are no transactions" do
test "returns no transactions" do
assert SkippedReceipts.first(1) == []
end
end
describe "first/1 when there are no skipped transactions" do
test "returns no transactions" do
transaction = insert(:transaction)
insert(:receipt, transaction: transaction)
assert SkippedReceipts.first(1) == []
end
end
describe "first/1 when a transaction has been skipped" do
test "returns the skipped transaction number" do
insert(:transaction, %{hash: "0xBEE75"})
assert SkippedReceipts.first(1) == ["0xBEE75"]
end
test "returns all the skipped transaction hashes in random order" do
insert(:transaction, %{hash: "0xBEE75"})
insert(:transaction, %{hash: "0xBE475"})
transaction_hashes = SkippedReceipts.first(100)
assert("0xBEE75" in transaction_hashes and "0xBE475" in transaction_hashes)
end
end
end

@ -1,39 +0,0 @@
defmodule Explorer.Workers.ImportBalanceTest do
import Mock
alias Explorer.Chain
alias Explorer.Chain.Address
alias Explorer.Workers.ImportBalance
use Explorer.DataCase
describe "perform/1" do
test "imports the balance for an address" do
ImportBalance.perform("0x1d12e5716c593b156eb7152ca4360f6224ba3b0a")
expected_balance = Decimal.new(1_572_374_181_095_000_000)
assert {:ok, %Address{balance: ^expected_balance}} =
Chain.hash_to_address("0x1d12e5716c593b156eb7152ca4360f6224ba3b0a")
end
end
describe "perform_later/1" do
test "delays the import of the balance for an address" do
with_mock Exq,
enqueue: fn _, _, _, _ ->
insert(
:address,
hash: "0xskateboards",
balance: 66
)
end do
ImportBalance.perform_later("0xskateboards")
expected_balance = Decimal.new(66)
assert {:ok, %Address{balance: ^expected_balance}} = Chain.hash_to_address("0xskateboards")
end
end
end
end

@ -1,66 +0,0 @@
defmodule Explorer.Workers.ImportBlockTest do
use Explorer.DataCase
import Mock
alias Explorer.Chain.Block
alias Explorer.Repo
alias Explorer.Workers.ImportBlock
describe "perform/1" do
test "imports the requested block number as an integer" do
use_cassette "import_block_perform_1_integer" do
ImportBlock.perform(1)
last_block = Block |> order_by(asc: :number) |> Repo.one()
assert last_block.number == 1
end
end
test "imports the requested block number as a string" do
use_cassette "import_block_perform_1_string" do
ImportBlock.perform("1")
last_block = Block |> order_by(asc: :number) |> Repo.one()
assert last_block.number == 1
end
end
test "imports the earliest block" do
use_cassette "import_block_perform_1_earliest" do
ImportBlock.perform("earliest")
last_block = Block |> order_by(asc: :number) |> Repo.one()
assert last_block.number == 0
end
end
test "imports the latest block" do
use_cassette "import_block_perform_1_latest" do
with_mock Exq, enqueue: fn _, _, _, [number] -> insert(:block, number: number) end do
ImportBlock.perform("latest")
last_block = Block |> order_by(asc: :number) |> Repo.one()
assert last_block.number > 0
end
end
end
test "when there is already a block with the requested hash" do
use_cassette "import_block_perform_1_duplicate" do
insert(:block, hash: "0x52c867bc0a91e573dc39300143c3bead7408d09d45bdb686749f02684ece72f3")
ImportBlock.perform("1")
block_count = Block |> Repo.all() |> Enum.count()
assert block_count == 1
end
end
end
describe "perform_later/1" do
test "does not retry fetching the latest block" do
use_cassette "import_block_perform_later_1_latest" do
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:block, number: 1) end do
ImportBlock.perform_later("latest")
last_block = Block |> order_by(asc: :number) |> limit(1) |> Repo.one()
assert last_block.number == 1
end
end
end
end
end

@@ -1,31 +0,0 @@
defmodule Explorer.Workers.ImportInternalTransactionTest do
use Explorer.DataCase
alias Explorer.Repo
alias Explorer.Chain.InternalTransaction
alias Explorer.Workers.ImportInternalTransaction
describe "perform/1" do
test "does not import the internal transactions when no transaction with the hash exists" do
use_cassette "import_internal_transaction_perform_1" do
assert_raise Ecto.NoResultsError, fn ->
ImportInternalTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
end
end
end
test "imports a receipt when an internal transaction with the hash exists" do
insert(
:transaction,
hash: "0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68"
)
use_cassette "import_internal_transaction_perform_1" do
ImportInternalTransaction.perform("0x051e031f05b3b3a5ff73e1189c36e3e2a41fd1c2d9772b2c75349e22ed4d3f68")
internal_transaction_count = InternalTransaction |> Repo.all() |> Enum.count()
assert internal_transaction_count == 2
end
end
end
end

@@ -1,31 +0,0 @@
defmodule Explorer.Workers.ImportReceiptTest do
use Explorer.DataCase
alias Explorer.Repo
alias Explorer.Chain.Receipt
alias Explorer.Workers.ImportReceipt
describe "perform/1" do
test "does not import a receipt when no transaction with the hash exists" do
use_cassette "import_receipt_perform_1" do
ImportReceipt.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
assert Repo.one(Receipt) == nil
end
end
test "imports a receipt when a transaction with the hash exists" do
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_receipt_perform_1" do
ImportReceipt.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
receipt_count = Receipt |> Repo.all() |> Enum.count()
assert receipt_count == 1
end
end
end
end

@@ -1,23 +0,0 @@
defmodule Explorer.Workers.ImportSkippedBlocksTest do
use Explorer.DataCase
import Mock
alias Explorer.Chain.Block
alias Explorer.Repo
alias Explorer.Workers.{ImportBlock, ImportSkippedBlocks}
describe "perform/1" do
test "imports the requested number of skipped blocks" do
insert(:block, %{number: 2})
use_cassette "import_skipped_blocks_perform_1" do
with_mock ImportBlock, perform_later: fn number -> insert(:block, number: number) end do
ImportSkippedBlocks.perform(1)
last_block = Block |> order_by(asc: :number) |> limit(1) |> Repo.one()
assert last_block.number == 1
end
end
end
end
end

@@ -1,148 +0,0 @@
defmodule Explorer.Workers.ImportTransactionTest do
use Explorer.DataCase
import Mock
alias Explorer.Chain.{InternalTransaction, Receipt, Transaction}
alias Explorer.Repo
alias Explorer.Workers.ImportInternalTransaction
alias Explorer.Workers.ImportTransaction
describe "perform/1" do
test "imports the requested transaction hash" do
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
end
transaction = Transaction |> Repo.one()
assert transaction.hash == "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
end
end
test "when there is already a transaction with the requested hash" do
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
end
transaction_count = Transaction |> Repo.all() |> Enum.count()
assert transaction_count == 1
end
end
test "imports the receipt in another queue" do
transaction =
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:receipt, transaction: transaction) end do
with_mock ImportInternalTransaction, perform_later: fn _ -> :ok end do
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
receipt = Repo.one(Receipt)
refute is_nil(receipt)
end
end
end
end
test "imports the receipt in another queue when a map is supplied" do
transaction =
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> insert(:receipt, transaction: transaction) end do
with_mock ImportInternalTransaction, perform_later: fn _ -> :ok end do
ImportTransaction.perform(%{
"hash" => "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926",
"to" => "0xc001",
"from" => "0xbead5",
"blockHash" => "0xcafe"
})
receipt = Repo.one(Receipt)
refute is_nil(receipt)
end
end
end
end
test "imports the internal transactions in another queue" do
transaction =
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do
with_mock ImportInternalTransaction,
perform_later: fn _ -> insert(:internal_transaction, transaction: transaction) end do
ImportTransaction.perform("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
internal_transaction = Repo.one(InternalTransaction)
refute is_nil(internal_transaction)
end
end
end
end
test "imports the internal transactions in another queue when a map is supplied" do
transaction =
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
use_cassette "import_transaction_perform_1" do
with_mock Exq, enqueue: fn _, _, _, _ -> :ok end do
with_mock ImportInternalTransaction,
perform_later: fn _ -> insert(:internal_transaction, transaction: transaction) end do
ImportTransaction.perform(%{
"hash" => "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926",
"to" => "0xc001",
"from" => "0xbead5",
"blockHash" => "0xcafe"
})
internal_transaction = Repo.one(InternalTransaction)
refute is_nil(internal_transaction)
end
end
end
end
end
describe "perform_later/1" do
test "imports the transaction in another queue" do
use_cassette "import_transaction_perform_1" do
with_mock Exq,
enqueue: fn _, _, _, _ ->
insert(
:transaction,
hash: "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
)
end do
ImportTransaction.perform_later("0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926")
transaction = Repo.one(Transaction)
assert transaction.hash == "0xf9a0959d5ccde33ec5221ddba1c6d7eaf9580a8d3512c7a1a60301362a98f926"
end
end
end
end
end

@@ -1,52 +0,0 @@
defmodule Explorer.Workers.RefreshBalanceTest do
use Explorer.DataCase
import Mock
alias Explorer.Chain.{Credit, Debit}
alias Explorer.Workers.RefreshBalance
describe "perform/0" do
test "refreshes credit balances" do
with_mock Exq, enqueue: fn _, _, _, [type] -> RefreshBalance.perform(type) end do
address = insert(:address)
transaction = insert(:transaction, value: 20)
insert(:to_address, address: address, transaction: transaction)
insert(:receipt, transaction: transaction, status: 1)
RefreshBalance.perform()
assert Repo.one(Credit).value == Decimal.new(20)
end
end
test "refreshes debit balances" do
with_mock Exq, enqueue: fn _, _, _, [type] -> RefreshBalance.perform(type) end do
address = insert(:address)
transaction = insert(:transaction, value: 20)
insert(:from_address, address: address, transaction: transaction)
insert(:receipt, transaction: transaction, status: 1)
RefreshBalance.perform()
assert Repo.one(Debit).value == Decimal.new(20)
end
end
end
describe "perform/1" do
test "refreshes credit balances" do
address = insert(:address)
transaction = insert(:transaction, value: 20)
insert(:to_address, address: address, transaction: transaction)
insert(:receipt, transaction: transaction, status: 1)
RefreshBalance.perform("credit")
assert Repo.one(Credit).value == Decimal.new(20)
end
test "refreshes debit balances" do
address = insert(:address)
transaction = insert(:transaction, value: 20)
insert(:from_address, address: address, transaction: transaction)
insert(:receipt, transaction: transaction, status: 1)
RefreshBalance.perform("debit")
assert Repo.one(Debit).value == Decimal.new(20)
end
end
end

@@ -1,9 +0,0 @@
defmodule Explorer.Chain.BlockTransactionFactory do
defmacro __using__(_opts) do
quote do
def block_transaction_factory do
%Explorer.Chain.BlockTransaction{}
end
end
end
end

@@ -1,9 +0,0 @@
defmodule Explorer.Chain.FromAddressFactory do
defmacro __using__(_opts) do
quote do
def from_address_factory do
%Explorer.Chain.FromAddress{}
end
end
end
end

@@ -1,9 +0,0 @@
defmodule Explorer.Chain.ToAddressFactory do
defmacro __using__(_opts) do
quote do
def to_address_factory do
%Explorer.Chain.ToAddress{}
end
end
end
end

@@ -22,16 +22,6 @@ defmodule Explorer.Chain.TransactionFactory do
from_address_id: insert(:address).id
}
end
def with_block(transaction, block \\ nil) do
block = block || insert(:block)
insert(:block_transaction, %{block_id: block.id, transaction_id: transaction.id})
transaction
end
def list_with_block(transactions, block \\ nil) do
Enum.map(transactions, fn transaction -> with_block(transaction, block) end)
end
end
end
end

@@ -3,11 +3,8 @@ defmodule Explorer.Factory do
use ExMachina.Ecto, repo: Explorer.Repo
use Explorer.Chain.AddressFactory
use Explorer.Chain.BlockFactory
use Explorer.Chain.BlockTransactionFactory
use Explorer.Chain.FromAddressFactory
use Explorer.Chain.InternalTransactionFactory
use Explorer.Chain.LogFactory
use Explorer.Chain.ReceiptFactory
use Explorer.Chain.ToAddressFactory
use Explorer.Chain.TransactionFactory
end

@@ -24,8 +24,6 @@ config :ex_cldr,
locales: ["en"],
gettext: ExplorerWeb.Gettext
config :exq_ui, server: false
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

@@ -22,23 +22,6 @@ defmodule ExplorerWeb.Router do
plug(SetLocale, gettext: ExplorerWeb.Gettext, default_locale: "en")
end
pipeline :exq do
plug(:accepts, ["html"])
plug(:fetch_session)
plug(:fetch_flash)
plug(:put_secure_browser_headers, %{
"content-security-policy" => "\
default-src 'self';\
script-src 'self' 'unsafe-inline';\
font-src 'self' fonts.gstatic.com;\
style-src 'self' 'unsafe-inline' fonts.googleapis.com;\
"
})
plug(ExqUi.RouterPlug, namespace: "exq")
end
pipeline :jasmine do
if Mix.env() != :prod, do: plug(Jasmine, js_files: ["js/test.js"], css_files: ["css/test.css"])
end
@@ -47,11 +30,6 @@ defmodule ExplorerWeb.Router do
plug(:accepts, ["json"])
end
scope "/exq", ExqUi do
pipe_through(:exq)
forward("/", RouterPlug.Router, :index)
end
scope "/", ExplorerWeb do
pipe_through(:browser)
pipe_through(:jasmine)

@@ -46,9 +46,8 @@ defmodule ExplorerWeb.Mixfile do
defp elixirc_paths, do: ["lib"]
# Specifies extra applications to start per environment
defp extra_applications(:prod), do: [:phoenix_pubsub_redis, :exq, :exq_ui | extra_applications()]
defp extra_applications(:prod), do: [:phoenix_pubsub_redis | extra_applications()]
defp extra_applications(:dev), do: [:exq, :exq_ui | extra_applications()]
defp extra_applications(_), do: extra_applications()
defp extra_applications,
@@ -61,8 +60,7 @@ defmodule ExplorerWeb.Mixfile do
:crontab,
:set_locale,
:logger,
:runtime_tools,
:new_relixir
:runtime_tools
]
# Specifies your project dependencies.
@@ -90,7 +88,6 @@ defmodule ExplorerWeb.Mixfile do
{:junit_formatter, ">= 0.0.0", only: [:test], runtime: false},
{:math, "~> 0.3.0"},
{:mock, "~> 0.3.0", only: [:test], runtime: false},
{:new_relixir, "~> 0.4"},
{:phoenix, "~> 1.3.0"},
{:phoenix_ecto, "~> 3.2"},
{:phoenix_html, "~> 2.10"},
