commit edfde284e4
@@ -0,0 +1,14 @@
body:after {
  position:absolute; width:0; height:0; overflow:hidden; z-index:-1;
  content:
    url(/images/network-selector-icons/callisto-mainnet.png)
    url(/images/network-selector-icons/ethereum-classic.png)
    url(/images/network-selector-icons/goerli-testnet.png)
    url(/images/network-selector-icons/kovan-testnet.png)
    url(/images/network-selector-icons/poa-core.png)
    url(/images/network-selector-icons/poa-sokol.png)
    url(/images/network-selector-icons/rinkeby-testnet.png)
    url(/images/network-selector-icons/rsk-mainnet.png)
    url(/images/network-selector-icons/ropsten-testnet.png)
    url(/images/network-selector-icons/xdai-chain.png)
};
@@ -0,0 +1,326 @@
defmodule Explorer.Chain.OrderedCache do
  @moduledoc """
  Behaviour for a cache of ordered elements.

  A macro based on `ConCache` is provided as well; at its minimum it can be used as:
  ```
  use Explorer.Chain.OrderedCache, name
  ```
  where `name` is a `t:atom/0` identifying the cache.

  All default values can be modified by overriding their respective function or
  by setting an option. For example (showing all of them):
  ```
  use Explorer.Chain.OrderedCache,
    name: :name, # needs to be set
    max_size: 51, # defaults to 100
    ids_list_key: :ids_key, # defaults to `name`
    preloads: [] # defaults to []
  ```
  Note: `preloads` can also be set singularly with the option `preload`, e.g.:
  ```
  use Explorer.Chain.OrderedCache,
    name: :cache,
    preload: :block,
    preload: :address,
    preload: [transaction: :hash]
  ```
  Additionally, all of the options accepted by `ConCache.start_link/1` can be
  provided as well. By default only `ttl_check_interval:` is set (to `false`).

  It's also possible, and advised, to override the implementation of the `c:prevails?/2`
  and `c:element_to_id/1` callbacks.
  For typechecking purposes it's also recommended to override the `t:element/0`
  and `t:id/0` type definitions.
  """

  @type element :: struct()

  @type id :: term()

  @doc """
  An atom that identifies this cache.
  """
  @callback cache_name :: atom()

  @doc """
  The key used to store the (ordered) list of elements.
  Because this list is stored in the cache itself, one needs to make sure it
  cannot be equal to any element id.
  """
  @callback ids_list_key :: term()

  @doc """
  The size that this cache cannot exceed.
  """
  @callback max_size :: non_neg_integer()

  @doc """
  Fields of the stored elements that need to be preloaded.
  For entities that are not stored in `Explorer.Repo` this should be empty.
  """
  @callback preloads :: [term()]

  @doc """
  The function that orders the elements and decides which ones are stored.
  `prevails?(id_a, id_b)` should return `true` if (in case there is no space for both)
  the element with `id_a` should be stored instead of the element with `id_b`,
  `false` otherwise.
  """
  @callback prevails?(id, id) :: boolean()

  @doc """
  The function that obtains a unique `t:id/0` from an `t:element/0`.
  """
  @callback element_to_id(element()) :: id()

  @doc "Returns the list of ids of the elements currently stored"
  @callback ids_list :: [id]

  @doc """
  Fetches an element from its id; returns `nil` if not found.
  """
  @callback get(id) :: element | nil

  @doc """
  Returns the current number of elements stored.
  """
  @callback size() :: non_neg_integer()

  @doc """
  Checks if there are enough elements stored.
  """
  @callback enough?(non_neg_integer()) :: boolean()

  @doc """
  Checks if the number of elements stored is already the maximum allowed.
  """
  @callback full? :: boolean()

  @doc "Returns all the stored elements"
  @callback all :: [element]

  @doc "Returns the `n` most prevailing elements stored, based on `c:prevails?/2`"
  @callback take(integer()) :: [element]

  @doc """
  Returns the `n` most prevailing elements, based on `c:prevails?/2`, unless there
  are not as many stored, in which case it returns `nil`.
  """
  @callback take_enough(integer()) :: [element] | nil

  @doc """
  Adds an element, or a list of elements, to the cache.
  When the cache is full, only the most prevailing elements will be stored, based
  on `c:prevails?/2`.
  NOTE: every update is isolated from the others.
  """
  @callback update([element] | element | nil) :: :ok

  defmacro __using__(name) when is_atom(name), do: do_using(name, [])

  defmacro __using__(opts) when is_list(opts) do
    # name is necessary
    name = Keyword.fetch!(opts, :name)
    do_using(name, opts)
  end

  # credo:disable-for-next-line /Complexity/
  defp do_using(name, opts) when is_atom(name) and is_list(opts) do
    ids_list_key = Keyword.get(opts, :ids_list_key, name)
    max_size = Keyword.get(opts, :max_size, 100)
    preloads = Keyword.get(opts, :preloads) || Keyword.get_values(opts, :preload)

    concache_params =
      opts
      |> Keyword.drop([:ids_list_key, :max_size, :preloads, :preload])
      |> Keyword.put_new(:ttl_check_interval, false)

    # credo:disable-for-next-line Credo.Check.Refactor.LongQuoteBlocks
    quote do
      alias Explorer.Chain.OrderedCache

      @behaviour OrderedCache

      ### Automatically set functions

      @impl OrderedCache
      def cache_name, do: unquote(name)

      @impl OrderedCache
      def ids_list_key, do: unquote(ids_list_key)

      @impl OrderedCache
      def max_size, do: unquote(max_size)

      @impl OrderedCache
      def preloads, do: unquote(preloads)

      ### Settable functions

      @impl OrderedCache
      def prevails?(id_a, id_b), do: id_a > id_b

      @impl OrderedCache
      def element_to_id(element), do: element

      ### Straightforward fetching functions

      @impl OrderedCache
      def ids_list, do: ConCache.get(cache_name(), ids_list_key()) || []

      @impl OrderedCache
      def get(id), do: ConCache.get(cache_name(), id)

      @impl OrderedCache
      def size, do: ids_list() |> Enum.count()

      @impl OrderedCache
      def enough?(amount), do: amount <= size()

      @impl OrderedCache
      def full?, do: max_size() <= size()

      @impl OrderedCache
      def all, do: Enum.map(ids_list(), &get(&1))

      @impl OrderedCache
      def take(amount) do
        ids_list()
        |> Enum.take(amount)
        |> Enum.map(&get(&1))
      end

      @impl OrderedCache
      def take_enough(amount) do
        # behaves just like `if enough?(amount), do: take(amount)`, but fetches
        # the list only once
        ids = ids_list()

        if amount <= Enum.count(ids) do
          ids
          |> Enum.take(amount)
          |> Enum.map(&get(&1))
        end
      end

      ### Updating function

      @impl OrderedCache
      def update(elements) when is_nil(elements), do: :ok

      def update(elements) when is_list(elements) do
        ConCache.update(cache_name(), ids_list_key(), fn ids ->
          updated_list =
            elements
            |> Enum.map(&{element_to_id(&1), &1})
            |> Enum.sort(&prevails?(&1, &2))
            |> merge_and_update(ids || [], max_size())

          {:ok, updated_list}
        end)
      end

      def update(element), do: update([element])

      defp merge_and_update(_candidates, existing, 0) do
        # if there is no more space in the list, remove the remaining existing
        # elements and return an empty list
        remove(existing)
        []
      end

      defp merge_and_update([], existing, size) do
        # if there are no more candidates to be inserted, keep as many of the
        # existing elements as possible and remove the rest
        {remaining, to_remove} = Enum.split(existing, size)
        remove(to_remove)
        remaining
      end

      defp merge_and_update(candidates, [], size) do
        # if there are still candidates and no more existing values, insert as
        # many candidates as possible and ignore the rest
        candidates
        |> Enum.take(size)
        |> Enum.map(fn {element_id, element} ->
          put_element(element_id, element)
          element_id
        end)
      end

      defp merge_and_update(candidates, existing, size) do
        [{candidate_id, candidate} | to_check] = candidates
        [head | tail] = existing

        cond do
          head == candidate_id ->
            # if a candidate has the id of an existing element, update its value
            put_element(candidate_id, candidate)
            [head | merge_and_update(to_check, tail, size - 1)]

          prevails?(head, candidate_id) ->
            # keep the prevailing existing value and compare all candidates against the rest
            [head | merge_and_update(candidates, tail, size - 1)]

          true ->
            # insert the new prevailing candidate and compare the remaining ones with the rest
            put_element(candidate_id, candidate)
            [candidate_id | merge_and_update(to_check, existing, size - 1)]
        end
      end

      defp remove(key) do
        # Removal is always performed asynchronously, after waiting 1/10 of a
        # second, so that others have time to fetch elements that were still in
        # the cache's list.
        # Other updates cannot interfere with the removed element: if it was
        # scheduled for removal it means it is too old, so subsequent updates
        # cannot insert it again in the future.
        Task.start(fn ->
          Process.sleep(100)

          if is_list(key) do
            Enum.map(key, &ConCache.delete(cache_name(), &1))
          else
            ConCache.delete(cache_name(), key)
          end
        end)
      end

      defp put_element(element_id, element) do
        full_element =
          if Enum.empty?(preloads()) do
            element
          else
            Explorer.Repo.preload(element, preloads())
          end

        # dirty puts are a little faster than puts with locks.
        # this is not a problem because this is the only function modifying rows
        # and it only gets called inside `update`, which works isolated
        ConCache.dirty_put(cache_name(), element_id, full_element)
      end

      ### Supervisor's child specification

      @doc """
      The child specification for a Supervisor. Note that all the `params`
      provided to this function will override the ones set by using the macro.
      """
      def child_spec(params) do
        params = Keyword.merge(unquote(concache_params), params)

        Supervisor.child_spec({ConCache, params}, id: child_id())
      end

      def child_id, do: {ConCache, cache_name()}

      defoverridable cache_name: 0,
                     ids_list_key: 0,
                     max_size: 0,
                     preloads: 0,
                     prevails?: 2,
                     element_to_id: 1
    end
  end
end
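
The moduledoc above advises overriding `c:prevails?/2` and `c:element_to_id/1`. As a minimal sketch of how a concrete cache might build on this macro, here is a hypothetical `Explorer.Chain.RecentBlocksCache` that keeps the newest blocks keyed by block number; the module name, cache name, size and preload below are illustrative assumptions, not part of this commit:

```elixir
defmodule Explorer.Chain.RecentBlocksCache do
  @moduledoc "Illustrative cache of the most recent blocks (not part of this commit)."

  alias Explorer.Chain.Block

  use Explorer.Chain.OrderedCache,
    name: :recent_blocks,
    max_size: 60,
    preload: :transactions

  @type element :: Block.t()
  @type id :: non_neg_integer()

  # Newer blocks (higher numbers) prevail over older ones.
  @impl Explorer.Chain.OrderedCache
  def prevails?(block_number_a, block_number_b), do: block_number_a > block_number_b

  # The block number doubles as the cache id.
  @impl Explorer.Chain.OrderedCache
  def element_to_id(%Block{number: number}), do: number
end
```

Such a module would be started by putting its generated `child_spec/1` into a supervision tree (e.g. `{Explorer.Chain.RecentBlocksCache, []}`) and used through the generated `update/1`, `take/1` and `take_enough/1` functions.
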
@@ -0,0 +1,108 @@
defmodule Explorer.ChainSpec.GenesisData do
  @moduledoc """
  Fetches genesis data.
  """

  use GenServer

  require Logger

  alias Explorer.ChainSpec.Parity.Importer
  alias HTTPoison.Response

  @interval :timer.minutes(2)

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @impl GenServer
  def init(_) do
    :timer.send_interval(@interval, :import)
    Process.send_after(self(), :import, @interval)

    {:ok, %{}}
  end

  # Callback for an errored fetch
  @impl GenServer
  def handle_info({_ref, {:error, reason}}, state) do
    Logger.warn(fn -> "Failed to fetch genesis data '#{reason}'." end)

    fetch_genesis_data()

    {:noreply, state}
  end

  @impl GenServer
  def handle_info(:import, state) do
    Logger.debug(fn -> "Importing genesis data" end)

    fetch_genesis_data()

    {:noreply, state}
  end

  # Callback for a monitored process that has shut down
  @impl GenServer
  def handle_info({:DOWN, _, :process, _, _}, state) do
    {:noreply, state}
  end

  # Callback for a successful fetch
  @impl GenServer
  def handle_info({_ref, _}, state) do
    {:noreply, state}
  end

  def fetch_genesis_data do
    path = Application.get_env(:explorer, __MODULE__)[:chain_spec_path]

    if path do
      Task.Supervisor.async_nolink(Explorer.GenesisDataTaskSupervisor, fn ->
        case fetch_spec(path) do
          {:ok, chain_spec} ->
            Importer.import_emission_rewards(chain_spec)
            {:ok, _} = Importer.import_genesis_coin_balances(chain_spec)

          {:error, reason} ->
            Logger.warn(fn -> "Failed to fetch genesis data. #{inspect(reason)}" end)
        end
      end)
    else
      Logger.warn(fn -> "Failed to fetch genesis data. Chain spec path is not set." end)
    end
  end

  defp fetch_spec(path) do
    if valid_url?(path) do
      fetch_from_url(path)
    else
      fetch_from_file(path)
    end
  end

  # sobelow_skip ["Traversal"]
  defp fetch_from_file(path) do
    with {:ok, data} <- File.read(path),
         {:ok, json} <- Jason.decode(data) do
      {:ok, json}
    end
  end

  defp fetch_from_url(url) do
    case HTTPoison.get(url) do
      {:ok, %Response{body: body, status_code: 200}} ->
        {:ok, Jason.decode!(body)}

      reason ->
        {:error, reason}
    end
  end

  defp valid_url?(string) do
    uri = URI.parse(string)

    uri.scheme != nil && uri.host =~ "."
  end
end
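
`fetch_genesis_data/0` only schedules work when `chain_spec_path` is configured under `Application.get_env(:explorer, Explorer.ChainSpec.GenesisData)`, and it runs the fetch through `Explorer.GenesisDataTaskSupervisor`. A minimal configuration sketch, assuming the path is supplied by an environment variable (the `CHAIN_SPEC_PATH` name and the `import Config` style are assumptions for illustration, not taken from this commit):

```elixir
# config/config.exs (sketch)
import Config

# `chain_spec_path` may be a local JSON file or an http(s) URL;
# fetch_spec/1 dispatches between the two based on valid_url?/1.
config :explorer, Explorer.ChainSpec.GenesisData,
  chain_spec_path: System.get_env("CHAIN_SPEC_PATH")
```

The supervision tree would also need to start the task supervisor the GenServer relies on, e.g. `{Task.Supervisor, name: Explorer.GenesisDataTaskSupervisor}`, alongside `Explorer.ChainSpec.GenesisData` itself.
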
@@ -0,0 +1,107 @@
defmodule Explorer.ChainSpec.Parity.Importer do
  @moduledoc """
  Imports data from a Parity chain spec.
  """

  alias Explorer.{Chain, Repo}
  alias Explorer.Chain.Block.{EmissionReward, Range}
  alias Explorer.Chain.Hash.Address, as: AddressHash
  alias Explorer.Chain.Wei

  @max_block_number :infinity

  def import_emission_rewards(chain_spec) do
    rewards = emission_rewards(chain_spec)

    {_, nil} = Repo.delete_all(EmissionReward)
    {_, nil} = Repo.insert_all(EmissionReward, rewards)
  end

  def import_genesis_coin_balances(chain_spec) do
    balance_params =
      chain_spec
      |> genesis_coin_balances()
      |> Stream.map(fn balance_map ->
        Map.put(balance_map, :block_number, 0)
      end)
      |> Enum.to_list()

    address_params =
      balance_params
      |> Stream.map(fn %{address_hash: hash} ->
        %{hash: hash}
      end)
      |> Enum.to_list()

    params = %{address_coin_balances: %{params: balance_params}, addresses: %{params: address_params}}

    Chain.import(params)
  end

  def genesis_coin_balances(chain_spec) do
    accounts = chain_spec["accounts"]

    parse_accounts(accounts)
  end

  def emission_rewards(chain_spec) do
    rewards = chain_spec["engine"]["Ethash"]["params"]["blockReward"]

    rewards
    |> parse_hex_numbers()
    |> format_ranges()
  end

  defp parse_accounts(accounts) do
    accounts
    |> Stream.filter(fn {_address, map} ->
      !is_nil(map["balance"])
    end)
    |> Stream.map(fn {address, %{"balance" => value}} ->
      {:ok, address_hash} = AddressHash.cast(address)
      balance = parse_hex_number(value)

      %{address_hash: address_hash, value: balance}
    end)
    |> Enum.to_list()
  end

  defp format_ranges(block_number_reward_pairs) do
    block_number_reward_pairs
    |> Enum.chunk_every(2, 1)
    |> Enum.map(fn values ->
      create_range(values)
    end)
  end

  defp create_range([{block_number1, reward}, {block_number2, _}]) do
    block_number1 = if block_number1 != 0, do: block_number1 + 1, else: 0

    %{
      block_range: %Range{from: block_number1, to: block_number2},
      reward: reward
    }
  end

  defp create_range([{block_number, reward}]) do
    %{
      block_range: %Range{from: block_number + 1, to: @max_block_number},
      reward: reward
    }
  end

  defp parse_hex_numbers(rewards) do
    Enum.map(rewards, fn {hex_block_number, hex_reward} ->
      block_number = parse_hex_number(hex_block_number)
      {:ok, reward} = hex_reward |> parse_hex_number() |> Wei.cast()

      {block_number, reward}
    end)
  end

  defp parse_hex_number("0x" <> hex_number) do
    {number, ""} = Integer.parse(hex_number, 16)

    number
  end
end
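
To make the range construction concrete: `parse_hex_numbers/1` turns the `"blockReward"` map into `{block_number, %Wei{}}` pairs, `Enum.chunk_every(pairs, 2, 1)` yields overlapping pairs plus a trailing singleton, and `create_range/1` starts each range one block after the pair's first threshold (except genesis) and ends it at the second threshold, with the last range open-ended at `:infinity`. A hedged walk-through with a hand-written two-entry spec (the map below is illustrative only; the hex values are the 5 ETH and 3 ETH rewards also seen in the test fixture):

```elixir
# 5 ETH from genesis, 3 ETH from block 4_370_000 (0x42ae50) onwards.
minimal_spec = %{
  "engine" => %{
    "Ethash" => %{
      "params" => %{
        "blockReward" => %{
          "0x0" => "0x4563918244f40000",
          "0x42ae50" => "0x29a2241af62c0000"
        }
      }
    }
  }
}

Explorer.ChainSpec.Parity.Importer.emission_rewards(minimal_spec)
# => [
#      %{block_range: %Range{from: 0, to: 4_370_000}, reward: 5 ETH as a %Wei{}},
#      %{block_range: %Range{from: 4_370_001, to: :infinity}, reward: 3 ETH as a %Wei{}}
#    ]
```
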
@@ -0,0 +1,108 @@
defmodule Explorer.ChainSpec.Parity.ImporterTest do
  use Explorer.DataCase

  alias Explorer.Chain.Address.CoinBalance
  alias Explorer.Chain.Block.{EmissionReward, Range}
  alias Explorer.Chain.{Address, Hash, Wei}
  alias Explorer.ChainSpec.Parity.Importer
  alias Explorer.Repo

  @chain_spec "#{File.cwd!()}/test/support/fixture/chain_spec/foundation.json"
              |> File.read!()
              |> Jason.decode!()

  describe "emission_rewards/1" do
    test "fetches and formats reward ranges" do
      assert Importer.emission_rewards(@chain_spec) == [
               %{
                 block_range: %Range{from: 0, to: 4_370_000},
                 reward: %Wei{value: Decimal.new(5_000_000_000_000_000_000)}
               },
               %{
                 block_range: %Range{from: 4_370_001, to: 7_280_000},
                 reward: %Wei{value: Decimal.new(3_000_000_000_000_000_000)}
               },
               %{
                 block_range: %Range{from: 7_280_001, to: :infinity},
                 reward: %Wei{value: Decimal.new(2_000_000_000_000_000_000)}
               }
             ]
    end
  end

  describe "import_emission_rewards/1" do
    test "inserts emission rewards from chain spec" do
      assert {3, nil} = Importer.import_emission_rewards(@chain_spec)
    end

    test "rewrites all records" do
      old_block_rewards = %{
        "0x0" => "0x1bc16d674ec80000",
        "0x42ae50" => "0x29a2241af62c0000",
        "0x6f1580" => "0x4563918244f40000"
      }

      chain_spec = %{
        @chain_spec
        | "engine" => %{
            @chain_spec["engine"]
            | "Ethash" => %{
                @chain_spec["engine"]["Ethash"]
                | "params" => %{@chain_spec["engine"]["Ethash"]["params"] | "blockReward" => old_block_rewards}
              }
          }
      }

      assert {3, nil} = Importer.import_emission_rewards(chain_spec)
      [first, second, third] = Repo.all(EmissionReward)

      assert first.reward == %Wei{value: Decimal.new(2_000_000_000_000_000_000)}
      assert first.block_range == %Range{from: 0, to: 4_370_000}

      assert second.reward == %Wei{value: Decimal.new(3_000_000_000_000_000_000)}
      assert second.block_range == %Range{from: 4_370_001, to: 7_280_000}

      assert third.reward == %Wei{value: Decimal.new(5_000_000_000_000_000_000)}
      assert third.block_range == %Range{from: 7_280_001, to: :infinity}

      assert {3, nil} = Importer.import_emission_rewards(@chain_spec)
      [new_first, new_second, new_third] = Repo.all(EmissionReward)

      assert new_first.reward == %Wei{value: Decimal.new(5_000_000_000_000_000_000)}
      assert new_first.block_range == %Range{from: 0, to: 4_370_000}

      assert new_second.reward == %Wei{value: Decimal.new(3_000_000_000_000_000_000)}
      assert new_second.block_range == %Range{from: 4_370_001, to: 7_280_000}

      assert new_third.reward == %Wei{value: Decimal.new(2_000_000_000_000_000_000)}
      assert new_third.block_range == %Range{from: 7_280_001, to: :infinity}
    end
  end

  describe "genesis_coin_balances/1" do
    test "parses coin balance" do
      coin_balances = Importer.genesis_coin_balances(@chain_spec)

      assert Enum.count(coin_balances) == 403

      assert %{
               address_hash: %Hash{
                 byte_count: 20,
                 bytes: <<121, 174, 179, 69, 102, 185, 116, 195, 90, 88, 129, 222, 192, 32, 146, 125, 167, 223, 93, 37>>
               },
               value: 2_000_000_000_000_000_000_000
             } ==
               List.first(coin_balances)
    end
  end

  describe "import_genesis_coin_balances/1" do
    test "imports coin balances" do
      {:ok, %{address_coin_balances: address_coin_balances}} = Importer.import_genesis_coin_balances(@chain_spec)

      assert Enum.count(address_coin_balances) == 403
      assert CoinBalance |> Repo.all() |> Enum.count() == 403
      assert Address |> Repo.all() |> Enum.count() == 403
    end
  end
end