Merge branch 'master' into mobile-menu-fix

pull/2737/head
Victor Baranov authored 5 years ago, committed via GitHub
commit 934251ad7f
90 changed files:
  1. .circleci/config.yml (15)
  2. CHANGELOG.md (29)
  3. apps/block_scout_web/assets/css/components/_dashboard-banner.scss (4)
  4. apps/block_scout_web/assets/js/lib/try_api.js (6)
  5. apps/block_scout_web/lib/block_scout_web/controllers/address_controller.ex (2)
  6. apps/block_scout_web/lib/block_scout_web/controllers/chain_controller.ex (2)
  7. apps/block_scout_web/lib/block_scout_web/controllers/tokens/holder_controller.ex (8)
  8. apps/block_scout_web/lib/block_scout_web/controllers/tokens/inventory_controller.ex (7)
  9. apps/block_scout_web/lib/block_scout_web/controllers/tokens/read_contract_controller.ex (8)
  10. apps/block_scout_web/lib/block_scout_web/controllers/tokens/token_controller.ex (34)
  11. apps/block_scout_web/lib/block_scout_web/controllers/tokens/transfer_controller.ex (7)
  12. apps/block_scout_web/lib/block_scout_web/notifier.ex (14)
  13. apps/block_scout_web/lib/block_scout_web/templates/block/overview.html.eex (3)
  14. apps/block_scout_web/lib/block_scout_web/templates/smart_contract/_functions.html.eex (2)
  15. apps/block_scout_web/lib/block_scout_web/templates/tokens/overview/_details.html.eex (8)
  16. apps/block_scout_web/lib/block_scout_web/templates/transaction_log/_logs.html.eex (8)
  17. apps/block_scout_web/mix.exs (2)
  18. apps/block_scout_web/priv/gettext/default.pot (47)
  19. apps/block_scout_web/priv/gettext/en/LC_MESSAGES/default.po (47)
  20. apps/block_scout_web/test/block_scout_web/channels/address_channel_test.exs (16)
  21. apps/block_scout_web/test/block_scout_web/controllers/address_coin_balance_by_day_controller_test.exs (4)
  22. apps/block_scout_web/test/block_scout_web/controllers/address_controller_test.exs (10)
  23. apps/block_scout_web/test/block_scout_web/controllers/api/rpc/address_controller_test.exs (4)
  24. apps/block_scout_web/test/block_scout_web/controllers/api/rpc/eth_controller_test.exs (4)
  25. apps/block_scout_web/test/block_scout_web/controllers/chain_controller_test.exs (8)
  26. apps/block_scout_web/test/block_scout_web/features/viewing_addresses_test.exs (6)
  27. apps/block_scout_web/test/block_scout_web/features/viewing_app_test.exs (6)
  28. apps/block_scout_web/test/block_scout_web/features/viewing_chain_test.exs (34)
  29. apps/block_scout_web/test/support/conn_case.ex (2)
  30. apps/block_scout_web/test/support/feature_case.ex (2)
  31. apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipts.ex (4)
  32. apps/explorer/config/config.exs (13)
  33. apps/explorer/config/test.exs (2)
  34. apps/explorer/lib/explorer/application.ex (5)
  35. apps/explorer/lib/explorer/chain.ex (149)
  36. apps/explorer/lib/explorer/chain/address.ex (10)
  37. apps/explorer/lib/explorer/chain/address/coin_balance.ex (2)
  38. apps/explorer/lib/explorer/chain/cache/accounts.ex (73)
  39. apps/explorer/lib/explorer/chain/contract_method.ex (5)
  40. apps/explorer/lib/explorer/chain/import.ex (4)
  41. apps/explorer/lib/explorer/chain/import/runner/address/coin_balances.ex (2)
  42. apps/explorer/lib/explorer/chain/import/runner/address/current_token_balances.ex (18)
  43. apps/explorer/lib/explorer/chain/import/runner/address/token_balances.ex (2)
  44. apps/explorer/lib/explorer/chain/import/runner/addresses.ex (21)
  45. apps/explorer/lib/explorer/chain/import/runner/block/rewards.ex (2)
  46. apps/explorer/lib/explorer/chain/import/runner/block/second_degree_relations.ex (2)
  47. apps/explorer/lib/explorer/chain/import/runner/blocks.ex (584)
  48. apps/explorer/lib/explorer/chain/import/runner/internal_transactions.ex (47)
  49. apps/explorer/lib/explorer/chain/import/runner/internal_transactions_indexed_at_blocks.ex (30)
  50. apps/explorer/lib/explorer/chain/import/runner/logs.ex (2)
  51. apps/explorer/lib/explorer/chain/import/runner/staking_pools.ex (37)
  52. apps/explorer/lib/explorer/chain/import/runner/staking_pools_delegators.ex (5)
  53. apps/explorer/lib/explorer/chain/import/runner/token_transfers.ex (2)
  54. apps/explorer/lib/explorer/chain/import/runner/tokens.ex (125)
  55. apps/explorer/lib/explorer/chain/import/runner/transaction/forks.ex (4)
  56. apps/explorer/lib/explorer/chain/import/runner/transactions.ex (70)
  57. apps/explorer/lib/explorer/chain/import/stage.ex (20)
  58. apps/explorer/lib/explorer/chain/import/stage/address_referencing.ex (28)
  59. apps/explorer/lib/explorer/chain/import/stage/block_following.ex (30)
  60. apps/explorer/lib/explorer/chain/import/stage/block_referencing.ex (30)
  61. apps/explorer/lib/explorer/chain_spec/parity/importer.ex (25)
  62. apps/explorer/lib/explorer/chain_spec/poa/importer.ex (25)
  63. apps/explorer/lib/explorer/counters/addresses_counter.ex (125)
  64. apps/explorer/lib/explorer/exchange_rates/source/coin_gecko.ex (34)
  65. apps/explorer/lib/explorer/market/market.ex (5)
  66. apps/explorer/lib/explorer/smart_contract/verifier.ex (15)
  67. apps/explorer/lib/explorer/validator/metadata_importer.ex (5)
  68. apps/explorer/priv/repo/migrations/20190910170703_create_indexes_for_block_number_in_token_transfers_and_transactions.exs (7)
  69. apps/explorer/test/explorer/chain/cache/accounts_test.exs (42)
  70. apps/explorer/test/explorer/chain/import/runner/addresses_test.exs (69)
  71. apps/explorer/test/explorer/chain/import/runner/blocks_test.exs (104)
  72. apps/explorer/test/explorer/chain_test.exs (37)
  73. apps/explorer/test/explorer/counters/addresses_counter_test.exs (16)
  74. apps/explorer/test/explorer/exchange_rates/source/coin_gecko_test.exs (101)
  75. apps/explorer/test/explorer/smart_contract/verifier_test.exs (28)
  76. apps/explorer/test/support/data_case.ex (2)
  77. apps/indexer/lib/indexer/block/fetcher.ex (7)
  78. apps/indexer/lib/indexer/block/realtime/fetcher.ex (3)
  79. apps/indexer/lib/indexer/fetcher/block_reward.ex (5)
  80. apps/indexer/lib/indexer/fetcher/coin_balance.ex (5)
  81. apps/indexer/lib/indexer/fetcher/coin_balance_on_demand.ex (8)
  82. apps/indexer/lib/indexer/fetcher/contract_code.ex (4)
  83. apps/indexer/lib/indexer/fetcher/internal_transaction.ex (3)
  84. apps/indexer/lib/indexer/fetcher/pending_transaction.ex (4)
  85. apps/indexer/lib/indexer/fetcher/uncle_block.ex (4)
  86. apps/indexer/lib/indexer/temporary/blocks_transactions_mismatch.ex (31)
  87. apps/indexer/test/indexer/block/fetcher/receipts_test.exs (14)
  88. docs/env-variables.md (3)
  89. docs/sharelocks.md (137)
  90. mix.lock (2)

@@ -33,12 +33,15 @@ jobs:
  - restore_cache:
  keys:
- - v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
- - v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
- - v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
+ - v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
+ - v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
+ - v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
  - run: mix deps.get
+ - run:
+ command: sed -i '68,68 s/^/%/' ./deps/hackney/src/hackney_ssl.erl
  - restore_cache:
  keys:
  - v7-npm-install-{{ .Branch }}-{{ checksum "apps/block_scout_web/assets/package-lock.json" }}
@@ -83,17 +86,17 @@ jobs:
  # `deps` needs to be cached with `_build` because `_build` will symlink into `deps`
  - save_cache:
- key: v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
+ key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
  paths:
  - deps
  - _build
  - save_cache:
- key: v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
+ key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
  paths:
  - deps
  - _build
  - save_cache:
- key: v7-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
+ key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
  paths:
  - deps
  - _build

@@ -1,17 +1,39 @@
  ## Current
  ### Features
- - [#2665](https://github.com/poanetwork/blockscout/pull/2665) - new menu layout for mobile devices
+ - [#2717](https://github.com/poanetwork/blockscout/pull/2717) - Improve speed of nonconsensus data removal
  - [#2679](https://github.com/poanetwork/blockscout/pull/2679) - added fixed height for card chain blocks and card chain transactions
  - [#2678](https://github.com/poanetwork/blockscout/pull/2678) - fixed dashboard banner height bug
  - [#2672](https://github.com/poanetwork/blockscout/pull/2672) - added new theme for xUSDT
+ - [#2667](https://github.com/poanetwork/blockscout/pull/2667) - Add ETS-based cache for accounts page
+ - [#2666](https://github.com/poanetwork/blockscout/pull/2666) - fetch token counters in parallel
+ - [#2665](https://github.com/poanetwork/blockscout/pull/2665) - new menu layout for mobile devices
  - [#2663](https://github.com/poanetwork/blockscout/pull/2663) - Fetch address counters in parallel
  ### Fixes
  - [#2737](https://github.com/poanetwork/blockscout/pull/2737) - switched hardcoded subnetwork value to elixir expression for mobile menu
+ - [#2736](https://github.com/poanetwork/blockscout/pull/2736) - do not update cache if no blocks were inserted
+ - [#2731](https://github.com/poanetwork/blockscout/pull/2731) - fix library verification
+ - [#2718](https://github.com/poanetwork/blockscout/pull/2718) - Include all addresses taking part in transactions in wallets' addresses counter
+ - [#2709](https://github.com/poanetwork/blockscout/pull/2709) - Fix stuck label and value for uncle block height
+ - [#2707](https://github.com/poanetwork/blockscout/pull/2707) - fix for dashboard banner chart legend items
+ - [#2706](https://github.com/poanetwork/blockscout/pull/2706) - fix empty total_supply in coin gecko response
+ - [#2701](https://github.com/poanetwork/blockscout/pull/2701) - Exclude nonconsensus blocks from avg block time calculation by default
+ - [#2696](https://github.com/poanetwork/blockscout/pull/2696) - do not update fetched_coin_balance with nil
+ - [#2693](https://github.com/poanetwork/blockscout/pull/2693) - remove non consensus internal transactions
+ - [#2691](https://github.com/poanetwork/blockscout/pull/2691) - fix exchange rate websocket update for Rootstock
+ - [#2688](https://github.com/poanetwork/blockscout/pull/2688) - fix try it out section
+ - [#2687](https://github.com/poanetwork/blockscout/pull/2687) - remove non-consensus token transfers, logs when inserting new consensus blocks
+ - [#2684](https://github.com/poanetwork/blockscout/pull/2684) - do not filter pending logs
  - [#2682](https://github.com/poanetwork/blockscout/pull/2682) - Use Task.start instead of Task.async in caches
+ - [#2671](https://github.com/poanetwork/blockscout/pull/2671) - fixed buttons color at smart contract section
+ - [#2660](https://github.com/poanetwork/blockscout/pull/2660) - set correct last value for coin balances chart data
+ - [#2619](https://github.com/poanetwork/blockscout/pull/2619) - Enforce DB transaction's order to prevent deadlocks
  ### Chore
+ - [#2724](https://github.com/poanetwork/blockscout/pull/2724) - fix ci by commenting a line in hackney library
+ - [#2708](https://github.com/poanetwork/blockscout/pull/2708) - add log index to logs view
+ - [#2723](https://github.com/poanetwork/blockscout/pull/2723) - get rid of ex_json_schema warnings
  ## 2.0.4-beta
@@ -49,6 +71,7 @@
  - [#2468](https://github.com/poanetwork/blockscout/pull/2468) - fix confirmations for non consensus blocks
  ### Chore
+ - [#2662](https://github.com/poanetwork/blockscout/pull/2662) - fetch coin gecko id based on the coin symbol
  - [#2646](https://github.com/poanetwork/blockscout/pull/2646) - Added Xerom to list of Additional Chains using BlockScout
  - [#2634](https://github.com/poanetwork/blockscout/pull/2634) - add Lukso to networks dropdown
  - [#2617](https://github.com/poanetwork/blockscout/pull/2617) - skip cache update if there are no blocks inserted

@@ -72,7 +72,7 @@ $dashboard-banner-chart-axis-font-color: $dashboard-stats-item-value-color !defa
  }
  .dashboard-banner-chart-legend {
- display: flex;
+ display: grid;
  grid-template-columns: 1fr 1fr;
  padding-bottom: 12px;
@@ -81,7 +81,7 @@ $dashboard-banner-chart-axis-font-color: $dashboard-stats-item-value-color !defa
  padding-left: 12px;
  padding-top: 3px;
  position: relative;
- padding-right: 60px;
+ padding-right: 12px;
  @include media-breakpoint-down(md) {
  display: flex;

@@ -55,10 +55,6 @@ function handleSuccess (query, xhr, clickedButton) {
  clickedButton.prop('disabled', false)
  }
- function dropDomain (url) {
-   return new URL(url).pathname
- }
  // Show 'Try it out' UI for a module/action.
  $('button[data-selector*="btn-try-api"]').click(event => {
  const clickedButton = $(event.target)
@@ -128,7 +124,7 @@ $('button[data-try-api-ui-button-type="execute"]').click(event => {
  }
  $.ajax({
- url: dropDomain(composeRequestUrl(query)),
+ url: composeRequestUrl(query),
  success: (_data, _status, xhr) => {
  handleSuccess(query, xhr, clickedButton)
  },

@@ -61,7 +61,7 @@ defmodule BlockScoutWeb.AddressController do
  def index(conn, _params) do
  render(conn, "index.html",
  current_path: current_path(conn),
- address_count: Chain.count_addresses_with_balance_from_cache()
+ address_count: Chain.count_addresses_from_cache()
  )
  end

@@ -28,7 +28,7 @@ defmodule BlockScoutWeb.ChainController do
  render(
  conn,
  "show.html",
- address_count: Chain.count_addresses_with_balance_from_cache(),
+ address_count: Chain.count_addresses_from_cache(),
  average_block_time: AverageBlockTime.average_block_time(),
  exchange_rate: exchange_rate,
  chart_data_path: market_history_chart_path(conn, :show),

@@ -12,6 +12,8 @@ defmodule BlockScoutWeb.Tokens.HolderController do
  next_page_params: 3
  ]
+ import BlockScoutWeb.Tokens.TokenController, only: [fetch_token_counters: 2]
  def index(conn, %{"token_id" => address_hash_string, "type" => "JSON"} = params) do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
  {:ok, token} <- Chain.token_from_address_hash(address_hash),
@@ -47,13 +49,15 @@ defmodule BlockScoutWeb.Tokens.HolderController do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
  {:ok, token} <- Chain.token_from_address_hash(address_hash, options) do
+ {total_token_transfers, total_token_holders} = fetch_token_counters(token, address_hash)
  render(
  conn,
  "index.html",
  current_path: current_path(conn),
  token: Market.add_price(token),
- total_token_holders: token.holder_count || Chain.count_token_holders_from_token_hash(address_hash),
- total_token_transfers: Chain.count_token_transfers_from_token_hash(address_hash)
+ total_token_holders: total_token_holders,
+ total_token_transfers: total_token_transfers
  )
  else
  :error ->

@@ -7,6 +7,7 @@ defmodule BlockScoutWeb.Tokens.InventoryController do
  alias Phoenix.View
  import BlockScoutWeb.Chain, only: [split_list_by_page: 1, default_paging_options: 0]
+ import BlockScoutWeb.Tokens.TokenController, only: [fetch_token_counters: 2]
  def index(conn, %{"token_id" => address_hash_string, "type" => "JSON"} = params) do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
@@ -64,13 +65,15 @@ defmodule BlockScoutWeb.Tokens.InventoryController do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
  {:ok, token} <- Chain.token_from_address_hash(address_hash, options) do
+ {total_token_transfers, total_token_holders} = fetch_token_counters(token, address_hash)
  render(
  conn,
  "index.html",
  current_path: current_path(conn),
  token: Market.add_price(token),
- total_token_transfers: Chain.count_token_transfers_from_token_hash(address_hash),
- total_token_holders: token.holder_count || Chain.count_token_holders_from_token_hash(address_hash)
+ total_token_transfers: total_token_transfers,
+ total_token_holders: total_token_holders
  )
  else
  :error ->

@@ -3,17 +3,21 @@ defmodule BlockScoutWeb.Tokens.ReadContractController do
  alias Explorer.{Chain, Market}
+ import BlockScoutWeb.Tokens.TokenController, only: [fetch_token_counters: 2]
  def index(conn, %{"token_id" => address_hash_string}) do
  options = [necessity_by_association: %{[contract_address: :smart_contract] => :optional}]
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
  {:ok, token} <- Chain.token_from_address_hash(address_hash, options) do
+ {total_token_transfers, total_token_holders} = fetch_token_counters(token, address_hash)
  render(
  conn,
  "index.html",
  token: Market.add_price(token),
- total_token_transfers: token.holder_count || Chain.count_token_transfers_from_token_hash(address_hash),
- total_token_holders: Chain.count_token_holders_from_token_hash(address_hash)
+ total_token_transfers: total_token_transfers,
+ total_token_holders: total_token_holders
  )
  else
  :error ->

@@ -1,7 +1,41 @@
  defmodule BlockScoutWeb.Tokens.TokenController do
  use BlockScoutWeb, :controller
+ require Logger
+ alias Explorer.Chain
  def show(conn, %{"id" => address_hash_string}) do
  redirect(conn, to: token_transfer_path(conn, :index, address_hash_string))
  end
+ def fetch_token_counters(token, address_hash) do
+   total_token_transfers_task =
+     Task.async(fn ->
+       Chain.count_token_transfers_from_token_hash(address_hash)
+     end)
+   total_token_holders_task =
+     Task.async(fn ->
+       token.holder_count || Chain.count_token_holders_from_token_hash(address_hash)
+     end)
+   [total_token_transfers_task, total_token_holders_task]
+   |> Task.yield_many(:timer.seconds(40))
+   |> Enum.map(fn {_task, res} ->
+     case res do
+       {:ok, result} ->
+         result
+       {:exit, reason} ->
+         Logger.warn("Query fetching token counters terminated: #{inspect(reason)}")
+         0
+       nil ->
+         Logger.warn("Query fetching token counters timed out.")
+         0
+     end
+   end)
+   |> List.to_tuple()
+ end
  end
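The new fetch_token_counters/2 above runs both counter queries concurrently and substitutes 0 for any task that exits or misses the 40-second budget. A minimal sketch of the same Task.yield_many/2 pattern in isolation (count_transfers/0 and count_holders/0 are hypothetical placeholders, not functions from this diff):

    # Hypothetical helpers standing in for the real Chain.count_* queries.
    tasks = [
      Task.async(fn -> count_transfers() end),
      Task.async(fn -> count_holders() end)
    ]

    {transfers, holders} =
      tasks
      |> Task.yield_many(:timer.seconds(40))
      |> Enum.map(fn
        {_task, {:ok, result}} -> result
        # Treat an exit or a timeout (nil) as a zero count, as the controller does.
        {_task, _exit_or_nil} -> 0
      end)
      |> List.to_tuple()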

@@ -6,6 +6,7 @@ defmodule BlockScoutWeb.Tokens.TransferController do
  alias Phoenix.View
  import BlockScoutWeb.Chain, only: [split_list_by_page: 1, paging_options: 1, next_page_params: 3]
+ import BlockScoutWeb.Tokens.TokenController, only: [fetch_token_counters: 2]
  def index(conn, %{"token_id" => address_hash_string, "type" => "JSON"} = params) do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
@@ -48,13 +49,15 @@ defmodule BlockScoutWeb.Tokens.TransferController do
  with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string),
  {:ok, token} <- Chain.token_from_address_hash(address_hash, options) do
+ {total_token_transfers, total_token_holders} = fetch_token_counters(token, address_hash)
  render(
  conn,
  "index.html",
  current_path: current_path(conn),
  token: Market.add_price(token),
- total_token_transfers: Chain.count_token_transfers_from_token_hash(address_hash),
- total_token_holders: token.holder_count || Chain.count_token_holders_from_token_hash(address_hash)
+ total_token_transfers: total_token_transfers,
+ total_token_holders: total_token_holders
  )
  else
  :error ->

@@ -7,13 +7,14 @@ defmodule BlockScoutWeb.Notifier do
  alias BlockScoutWeb.{AddressContractVerificationView, Endpoint}
  alias Explorer.{Chain, Market, Repo}
  alias Explorer.Chain.{Address, InternalTransaction, Transaction}
+ alias Explorer.Chain.Supply.RSK
  alias Explorer.Counters.AverageBlockTime
  alias Explorer.ExchangeRates.Token
  alias Explorer.SmartContract.{Solidity.CodeCompiler, Solidity.CompilerVersion}
  alias Phoenix.View
  def handle_event({:chain_event, :addresses, type, addresses}) when type in [:realtime, :on_demand] do
- Endpoint.broadcast("addresses:new_address", "count", %{count: Chain.count_addresses_with_balance_from_cache()})
+ Endpoint.broadcast("addresses:new_address", "count", %{count: Chain.count_addresses_from_cache()})
  addresses
  |> Stream.reject(fn %Address{fetched_coin_balance: fetched_coin_balance} -> is_nil(fetched_coin_balance) end)
@@ -76,8 +77,17 @@ defmodule BlockScoutWeb.Notifier do
  data -> data
  end
+ exchange_rate_with_available_supply =
+   case Application.get_env(:explorer, :supply) do
+     RSK ->
+       %{exchange_rate | available_supply: nil, market_cap_usd: RSK.market_cap(exchange_rate)}
+     _ ->
+       exchange_rate
+   end
  Endpoint.broadcast("exchange_rate:new_rate", "new_rate", %{
- exchange_rate: exchange_rate,
+ exchange_rate: exchange_rate_with_available_supply,
  market_history_data: Enum.map(market_history_data, fn day -> Map.take(day, [:closing_price, :date]) end)
  })
  end
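The exchange-rate broadcast now substitutes an RSK-specific market cap (and clears available_supply) when the application is configured to use the RSK supply module. A hedged sketch of configuration that would make the RSK clause match; the exact config key is an assumption, not shown in this diff:

    # config/config.exs (assumed): make Application.get_env(:explorer, :supply)
    # return Explorer.Chain.Supply.RSK so the RSK branch above is taken.
    config :explorer, supply: Explorer.Chain.Supply.RSK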

@@ -12,8 +12,7 @@
  <%= if block_type(@block) == "Block" do %>
  <%= gettext("Block Height: %{height}", height: @block.number) %> <%= if @block.number == 0, do: "- " <> gettext("Genesis Block")%>
  <% else %>
- <%= gettext("%{block_type} Height:", block_type: block_type(@block)) %>
- <%= link(@block, to: block_path(BlockScoutWeb.Endpoint, :show, @block.number)) %>
+ <%= gettext("%{block_type} Height:", block_type: block_type(@block)) %> <%= link(@block, to: block_path(BlockScoutWeb.Endpoint, :show, @block.number)) %>
  <% end %>
  </h3>
  <div class="d-flex justify-content-start text-muted block-details-row">

@@ -19,7 +19,7 @@
  </div>
  <% end %>
- <input type="submit" value='<%= gettext("Query")%>' class="button button-secondary button-xs py-0 mt-2" />
+ <input type="submit" value='<%= gettext("Query")%>' class="button btn-line button-xs py-0 mt-2" style="padding: 6px 8px!important;height: 26px;font-size: 11px;" />
  </form>
  <div class='p-2 text-muted <%= if (queryable?(function["inputs"]) == true), do: "w-100" %>'>

@@ -57,8 +57,12 @@
  </span>
  <div class="d-flex flex-row justify-content-start text-muted">
  <span class="mr-4"> <%= @token.type %> </span>
- <span class="mr-4"><%= @total_token_holders %> <%= gettext "Addresses" %></span>
- <span class="mr-4"><%= @total_token_transfers %> <%= gettext "Transfers" %></span>
+ <%= if @total_token_holders > 0 do %>
+   <span class="mr-4"><%= @total_token_holders %> <%= gettext "Addresses" %></span>
+ <% end %>
+ <%= if @total_token_transfers > 0 do %>
+   <span class="mr-4"><%= @total_token_transfers %> <%= gettext "Transfers" %></span>
+ <% end %>
  <%= if decimals?(@token) do %>
  <span class="mr-4"><%= @token.decimals %> <%= gettext "Decimals" %></span>
  <% end %>

@@ -185,5 +185,13 @@
  </div>
  <% end %>
  </dd>
+ <dt class="col-md-2">
+   <%= gettext "Log Index" %>
+ </dt>
+ <dd class="col-md-10">
+   <div class="text-dark raw-transaction-log-index">
+     <%= @log.index %>
+   </div>
+ </dd>
  </dl>
  </div>

@@ -131,7 +131,7 @@ defmodule BlockScoutWeb.Mixfile do
  # `:cowboy` `~> 2.0` and Phoenix 1.4 compatibility
  {:wobserver, "~> 0.2.0", github: "poanetwork/wobserver", branch: "support-https"},
  {:phoenix_form_awesomplete, "~> 0.1.4"},
- {:ex_json_schema, "~> 0.6.1"}
+ {:ex_json_schema, "~> 0.6.2"}
  ]
  end

@@ -43,7 +43,7 @@ msgid "%{block_type}s"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:21
+ #: lib/block_scout_web/templates/block/overview.html.eex:20
  #: lib/block_scout_web/templates/chain/_block.html.eex:11
  msgid "%{count} Transactions"
  msgstr ""
@@ -135,7 +135,7 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/address/index.html.eex:4
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:60
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:61
  msgid "Addresses"
  msgstr ""
@@ -304,8 +304,8 @@ msgstr ""
  #: lib/block_scout_web/templates/address/_validator_metadata_modal.html.eex:37
  #: lib/block_scout_web/templates/address/overview.html.eex:145
  #: lib/block_scout_web/templates/address/overview.html.eex:153
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:106
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:114
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:110
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:118
  msgid "Close"
  msgstr ""
@@ -480,7 +480,7 @@ msgid "Copy Txn Hash"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:59
+ #: lib/block_scout_web/templates/block/overview.html.eex:58
  msgid "Difficulty"
  msgstr ""
@@ -526,7 +526,7 @@ msgid "Data"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:63
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:67
  msgid "Decimals"
  msgstr ""
@@ -562,15 +562,15 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:56
- #: lib/block_scout_web/templates/block/overview.html.eex:109
- #: lib/block_scout_web/templates/block/overview.html.eex:159
+ #: lib/block_scout_web/templates/block/overview.html.eex:108
+ #: lib/block_scout_web/templates/block/overview.html.eex:158
  msgid "Gas Limit"
  msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:61
- #: lib/block_scout_web/templates/block/overview.html.eex:102
- #: lib/block_scout_web/templates/block/overview.html.eex:153
+ #: lib/block_scout_web/templates/block/overview.html.eex:101
+ #: lib/block_scout_web/templates/block/overview.html.eex:152
  msgid "Gas Used"
  msgstr ""
@@ -657,7 +657,7 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:38
- #: lib/block_scout_web/templates/block/overview.html.eex:125
+ #: lib/block_scout_web/templates/block/overview.html.eex:124
  #: lib/block_scout_web/templates/chain/_block.html.eex:15
  msgid "Miner"
  msgstr ""
@@ -712,7 +712,7 @@ msgid "Execute"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:74
+ #: lib/block_scout_web/templates/block/overview.html.eex:73
  #: lib/block_scout_web/templates/transaction/overview.html.eex:79
  msgid "Nonce"
  msgstr ""
@@ -767,7 +767,7 @@ msgid "GET"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:85
+ #: lib/block_scout_web/templates/block/overview.html.eex:84
  msgid "Position %{index}"
  msgstr ""
@@ -798,7 +798,7 @@ msgid "Gwei"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:37
+ #: lib/block_scout_web/templates/block/overview.html.eex:36
  msgid "Hash"
  msgstr ""
@@ -964,7 +964,7 @@ msgid "Top Accounts - %{subnetwork} Explorer"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:68
+ #: lib/block_scout_web/templates/block/overview.html.eex:67
  msgid "Total Difficulty"
  msgstr ""
@@ -1246,7 +1246,7 @@ msgid "Parameters"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:44
+ #: lib/block_scout_web/templates/block/overview.html.eex:43
  msgid "Parent Hash"
  msgstr ""
@@ -1277,7 +1277,7 @@ msgstr ""
  #: lib/block_scout_web/templates/address/overview.html.eex:33
  #: lib/block_scout_web/templates/address/overview.html.eex:144
  #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:36
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:105
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:109
  msgid "QR Code"
  msgstr ""
@@ -1404,7 +1404,7 @@ msgid "Show Validator Info"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:146
+ #: lib/block_scout_web/templates/block/overview.html.eex:145
  msgid "Block Rewards"
  msgstr ""
@@ -1554,7 +1554,7 @@ msgid "Topics"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:75
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:79
  msgid "Total Supply"
  msgstr ""
@@ -1611,7 +1611,7 @@ msgid "Transactions sent"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:61
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:64
  msgid "Transfers"
  msgstr ""
@@ -1646,7 +1646,7 @@ msgid "Uncle Reward"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:81
+ #: lib/block_scout_web/templates/block/overview.html.eex:80
  #: lib/block_scout_web/templates/layout/_topnav.html.eex:52
  msgid "Uncles"
  msgstr ""
@@ -1841,3 +1841,8 @@ msgstr ""
  #: lib/block_scout_web/views/address_contract_view.ex:22
  msgid "true"
  msgstr ""
+ #, elixir-format
+ #: lib/block_scout_web/templates/transaction_log/_logs.html.eex:189
+ msgid "Log Index"
+ msgstr ""

@@ -43,7 +43,7 @@ msgid "%{block_type}s"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:21
+ #: lib/block_scout_web/templates/block/overview.html.eex:20
  #: lib/block_scout_web/templates/chain/_block.html.eex:11
  msgid "%{count} Transactions"
  msgstr ""
@@ -135,7 +135,7 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/address/index.html.eex:4
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:60
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:61
  msgid "Addresses"
  msgstr ""
@@ -304,8 +304,8 @@ msgstr ""
  #: lib/block_scout_web/templates/address/_validator_metadata_modal.html.eex:37
  #: lib/block_scout_web/templates/address/overview.html.eex:145
  #: lib/block_scout_web/templates/address/overview.html.eex:153
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:106
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:114
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:110
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:118
  msgid "Close"
  msgstr ""
@@ -480,7 +480,7 @@ msgid "Copy Txn Hash"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:59
+ #: lib/block_scout_web/templates/block/overview.html.eex:58
  msgid "Difficulty"
  msgstr ""
@@ -526,7 +526,7 @@ msgid "Data"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:63
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:67
  msgid "Decimals"
  msgstr ""
@@ -562,15 +562,15 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:56
- #: lib/block_scout_web/templates/block/overview.html.eex:109
- #: lib/block_scout_web/templates/block/overview.html.eex:159
+ #: lib/block_scout_web/templates/block/overview.html.eex:108
+ #: lib/block_scout_web/templates/block/overview.html.eex:158
  msgid "Gas Limit"
  msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:61
- #: lib/block_scout_web/templates/block/overview.html.eex:102
- #: lib/block_scout_web/templates/block/overview.html.eex:153
+ #: lib/block_scout_web/templates/block/overview.html.eex:101
+ #: lib/block_scout_web/templates/block/overview.html.eex:152
  msgid "Gas Used"
  msgstr ""
@@ -657,7 +657,7 @@ msgstr ""
  #, elixir-format
  #: lib/block_scout_web/templates/block/_tile.html.eex:38
- #: lib/block_scout_web/templates/block/overview.html.eex:125
+ #: lib/block_scout_web/templates/block/overview.html.eex:124
  #: lib/block_scout_web/templates/chain/_block.html.eex:15
  msgid "Miner"
  msgstr ""
@@ -712,7 +712,7 @@ msgid "Execute"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:74
+ #: lib/block_scout_web/templates/block/overview.html.eex:73
  #: lib/block_scout_web/templates/transaction/overview.html.eex:79
  msgid "Nonce"
  msgstr ""
@@ -767,7 +767,7 @@ msgid "GET"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:85
+ #: lib/block_scout_web/templates/block/overview.html.eex:84
  msgid "Position %{index}"
  msgstr ""
@@ -798,7 +798,7 @@ msgid "Gwei"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:37
+ #: lib/block_scout_web/templates/block/overview.html.eex:36
  msgid "Hash"
  msgstr ""
@@ -964,7 +964,7 @@ msgid "Top Accounts - %{subnetwork} Explorer"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:68
+ #: lib/block_scout_web/templates/block/overview.html.eex:67
  msgid "Total Difficulty"
  msgstr ""
@@ -1246,7 +1246,7 @@ msgid "Parameters"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:44
+ #: lib/block_scout_web/templates/block/overview.html.eex:43
  msgid "Parent Hash"
  msgstr ""
@@ -1277,7 +1277,7 @@ msgstr ""
  #: lib/block_scout_web/templates/address/overview.html.eex:33
  #: lib/block_scout_web/templates/address/overview.html.eex:144
  #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:36
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:105
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:109
  msgid "QR Code"
  msgstr ""
@@ -1404,7 +1404,7 @@ msgid "Show Validator Info"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:146
+ #: lib/block_scout_web/templates/block/overview.html.eex:145
  msgid "Block Rewards"
  msgstr ""
@@ -1554,7 +1554,7 @@ msgid "Topics"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:75
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:79
  msgid "Total Supply"
  msgstr ""
@@ -1611,7 +1611,7 @@ msgid "Transactions sent"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:61
+ #: lib/block_scout_web/templates/tokens/overview/_details.html.eex:64
  msgid "Transfers"
  msgstr ""
@@ -1646,7 +1646,7 @@ msgid "Uncle Reward"
  msgstr ""
  #, elixir-format
- #: lib/block_scout_web/templates/block/overview.html.eex:81
+ #: lib/block_scout_web/templates/block/overview.html.eex:80
  #: lib/block_scout_web/templates/layout/_topnav.html.eex:52
  msgid "Uncles"
  msgstr ""
@@ -1841,3 +1841,8 @@ msgstr ""
  #: lib/block_scout_web/views/address_contract_view.ex:22
  msgid "true"
  msgstr ""
+ #, elixir-format
+ #: lib/block_scout_web/templates/transaction_log/_logs.html.eex:189
+ msgid "Log Index"
+ msgstr ""

@@ -1,11 +1,11 @@
  defmodule BlockScoutWeb.AddressChannelTest do
  use BlockScoutWeb.ChannelCase,
- # ETS tables are shared in `Explorer.Counters.AddressesWithBalanceCounter`
+ # ETS tables are shared in `Explorer.Counters.AddressesCounter`
  async: false
  alias BlockScoutWeb.UserSocket
  alias BlockScoutWeb.Notifier
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  test "subscribed user is notified of new_address count event" do
  topic = "addresses:new_address"
@@ -13,8 +13,8 @@ defmodule BlockScoutWeb.AddressChannelTest do
  address = insert(:address)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  Notifier.handle_event({:chain_event, :addresses, :realtime, [address]})
@@ -55,8 +55,8 @@ defmodule BlockScoutWeb.AddressChannelTest do
  test "notified of balance_update for matching address", %{address: address, topic: topic} do
  address_with_balance = %{address | fetched_coin_balance: 1}
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  Notifier.handle_event({:chain_event, :addresses, :realtime, [address_with_balance]})
@@ -67,8 +67,8 @@ defmodule BlockScoutWeb.AddressChannelTest do
  end
  test "not notified of balance_update if fetched_coin_balance is nil", %{address: address} do
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  Notifier.handle_event({:chain_event, :addresses, :realtime, [address]})

@@ -5,8 +5,8 @@ defmodule BlockScoutWeb.AddressCoinBalanceByDayControllerTest do
  test "returns the coin balance history grouped by date", %{conn: conn} do
  address = insert(:address)
  noon = Timex.now() |> Timex.beginning_of_day() |> Timex.set(hour: 12)
- block = insert(:block, timestamp: noon)
- block_one_day_ago = insert(:block, timestamp: Timex.shift(noon, days: -1))
+ block = insert(:block, timestamp: noon, number: 2)
+ block_one_day_ago = insert(:block, timestamp: Timex.shift(noon, days: -1), number: 1)
  insert(:fetched_balance, address_hash: address.hash, value: 1000, block_number: block.number)
  insert(:fetched_balance, address_hash: address.hash, value: 2000, block_number: block_one_day_ago.number)

@@ -3,7 +3,7 @@ defmodule BlockScoutWeb.AddressControllerTest do
  # ETS tables are shared in `Explorer.Counters.*`
  async: false
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  describe "GET index/2" do
  test "returns top addresses", %{conn: conn} do
@@ -12,8 +12,8 @@ defmodule BlockScoutWeb.AddressControllerTest do
  |> Enum.map(&insert(:address, fetched_coin_balance: &1))
  |> Enum.map(& &1.hash)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  conn = get(conn, address_path(conn, :index, %{type: "JSON"}))
  {:ok, %{"items" => items}} = Poison.decode(conn.resp_body)
@@ -25,8 +25,8 @@ defmodule BlockScoutWeb.AddressControllerTest do
  address = insert(:address, fetched_coin_balance: 1)
  insert(:address_name, address: address, primary: true, name: "POA Wallet")
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  conn = get(conn, address_path(conn, :index, %{type: "JSON"}))

@@ -6,7 +6,7 @@ defmodule BlockScoutWeb.API.RPC.AddressControllerTest do
  alias BlockScoutWeb.API.RPC.AddressController
  alias Explorer.Chain
  alias Explorer.Chain.{Events.Subscriber, Transaction, Wei}
- alias Explorer.Counters.{AddressesWithBalanceCounter, AverageBlockTime}
+ alias Explorer.Counters.{AddressesCounter, AverageBlockTime}
  alias Indexer.Fetcher.CoinBalanceOnDemand
  alias Explorer.Repo
@@ -22,7 +22,7 @@ defmodule BlockScoutWeb.API.RPC.AddressControllerTest do
  start_supervised!({Task.Supervisor, name: Indexer.TaskSupervisor})
  start_supervised!(AverageBlockTime)
  start_supervised!({CoinBalanceOnDemand, [mocked_json_rpc_named_arguments, [name: CoinBalanceOnDemand]]})
- start_supervised!(AddressesWithBalanceCounter)
+ start_supervised!(AddressesCounter)
  Application.put_env(:explorer, AverageBlockTime, enabled: true)

@@ -1,7 +1,7 @@
  defmodule BlockScoutWeb.API.RPC.EthControllerTest do
  use BlockScoutWeb.ConnCase, async: false
- alias Explorer.Counters.{AddressesWithBalanceCounter, AverageBlockTime}
+ alias Explorer.Counters.{AddressesCounter, AverageBlockTime}
  alias Explorer.Repo
  alias Indexer.Fetcher.CoinBalanceOnDemand
@@ -14,7 +14,7 @@ defmodule BlockScoutWeb.API.RPC.EthControllerTest do
  start_supervised!({Task.Supervisor, name: Indexer.TaskSupervisor})
  start_supervised!(AverageBlockTime)
  start_supervised!({CoinBalanceOnDemand, [mocked_json_rpc_named_arguments, [name: CoinBalanceOnDemand]]})
- start_supervised!(AddressesWithBalanceCounter)
+ start_supervised!(AddressesCounter)
  Application.put_env(:explorer, AverageBlockTime, enabled: true)

@@ -1,18 +1,18 @@
  defmodule BlockScoutWeb.ChainControllerTest do
  use BlockScoutWeb.ConnCase,
- # ETS table is shared in `Explorer.Counters.AddressesWithBalanceCounter`
+ # ETS table is shared in `Explorer.Counters.AddressesCounter`
  async: false
  import BlockScoutWeb.WebRouter.Helpers, only: [chain_path: 2, block_path: 3, transaction_path: 3, address_path: 3]
  alias Explorer.Chain.Block
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  setup do
  Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id())
  Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id())
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  :ok
  end

@@ -3,7 +3,7 @@ defmodule BlockScoutWeb.ViewingAddressesTest do
  # Because ETS tables is shared for `Explorer.Counters.*`
  async: false
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  alias BlockScoutWeb.{AddressPage, AddressView, Notifier}
  setup do
@@ -58,8 +58,8 @@ defmodule BlockScoutWeb.ViewingAddressesTest do
  [first_address | _] = addresses
  [last_address | _] = Enum.reverse(addresses)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> AddressPage.visit_page()

@@ -5,11 +5,11 @@ defmodule BlockScoutWeb.ViewingAppTest do
  alias BlockScoutWeb.AppPage
  alias BlockScoutWeb.Counters.BlocksIndexedCounter
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  setup do
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  :ok
  end

@@ -7,7 +7,7 @@ defmodule BlockScoutWeb.ViewingChainTest do
  alias BlockScoutWeb.{AddressPage, BlockPage, ChainPage, TransactionPage}
  alias Explorer.Chain.Block
- alias Explorer.Counters.AddressesWithBalanceCounter
+ alias Explorer.Counters.AddressesCounter
  setup do
  Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id())
@@ -35,8 +35,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  test "search for address", %{session: session} do
  address = insert(:address)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -49,8 +49,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  test "search for blocks from chain page", %{session: session} do
  block = insert(:block, number: 6)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -59,8 +59,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  end
  test "blocks list", %{session: session} do
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -70,8 +70,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  test "inserts place holder blocks on render for out of order blocks", %{session: session} do
  insert(:block, number: 409)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -84,8 +84,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  test "search for transactions", %{session: session} do
  transaction = insert(:transaction)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -94,8 +94,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  end
  test "transactions list", %{session: session} do
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -111,8 +111,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  |> with_contract_creation(contract_address)
  |> with_block(block)
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()
@@ -138,8 +138,8 @@ defmodule BlockScoutWeb.ViewingChainTest do
  token_contract_address: contract_token_address
  )
- start_supervised!(AddressesWithBalanceCounter)
- AddressesWithBalanceCounter.consolidate()
+ start_supervised!(AddressesCounter)
+ AddressesCounter.consolidate()
  session
  |> ChainPage.visit_page()

@ -42,6 +42,8 @@ defmodule BlockScoutWeb.ConnCase do
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id()) Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id()) Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
{:ok, conn: Phoenix.ConnTest.build_conn()} {:ok, conn: Phoenix.ConnTest.build_conn()}
end end

@ -29,6 +29,8 @@ defmodule BlockScoutWeb.FeatureCase do
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id()) Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id()) Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(Explorer.Repo, self()) metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(Explorer.Repo, self())
{:ok, session} = Wallaby.start_session(metadata: metadata) {:ok, session} = Wallaby.start_session(metadata: metadata)

@ -61,9 +61,7 @@ defmodule EthereumJSONRPC.Receipts do
""" """
@spec elixir_to_logs(elixir) :: Logs.elixir() @spec elixir_to_logs(elixir) :: Logs.elixir()
def elixir_to_logs(elixir) when is_list(elixir) do def elixir_to_logs(elixir) when is_list(elixir) do
elixir Enum.flat_map(elixir, &Receipt.elixir_to_logs/1)
|> Enum.flat_map(&Receipt.elixir_to_logs/1)
|> Enum.filter(&(Map.get(&1, "type") != "pending"))
end end
@doc """ @doc """

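The `Receipts.elixir_to_logs/1` hunk above removes the post-processing filter on logs of type "pending"; the function now simply flattens every receipt's logs. A toy illustration of the resulting behaviour (plain maps standing in for decoded receipts):

    iex> receipts = [%{"logs" => [%{"type" => "mined"}]}, %{"logs" => [%{"type" => "pending"}]}]
    iex> Enum.flat_map(receipts, & &1["logs"])
    [%{"type" => "mined"}, %{"type" => "pending"}]

Previously the second entry would have been dropped.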
@ -14,7 +14,7 @@ config :explorer,
System.get_env("ALLOWED_EVM_VERSIONS") || System.get_env("ALLOWED_EVM_VERSIONS") ||
"homestead,tangerineWhistle,spuriousDragon,byzantium,constantinople,petersburg,default", "homestead,tangerineWhistle,spuriousDragon,byzantium,constantinople,petersburg,default",
include_uncles_in_average_block_time: include_uncles_in_average_block_time:
if(System.get_env("UNCLES_IN_AVERAGE_BLOCK_TIME") == "false", do: false, else: true), if(System.get_env("UNCLES_IN_AVERAGE_BLOCK_TIME") == "true", do: true, else: false),
healthy_blocks_period: System.get_env("HEALTHY_BLOCKS_PERIOD") || :timer.minutes(5) healthy_blocks_period: System.get_env("HEALTHY_BLOCKS_PERIOD") || :timer.minutes(5)
average_block_period = average_block_period =
@ -38,8 +38,6 @@ config :explorer, Explorer.Chain.Cache.BlockNumber,
ttl_check_interval: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(1), else: false), ttl_check_interval: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(1), else: false),
global_ttl: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(5)) global_ttl: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(5))
config :explorer, Explorer.ExchangeRates.Source.CoinGecko, coin_id: System.get_env("COIN_GECKO_ID", "poa-network")
balances_update_interval = balances_update_interval =
if System.get_env("ADDRESS_WITH_BALANCES_UPDATE_INTERVAL") do if System.get_env("ADDRESS_WITH_BALANCES_UPDATE_INTERVAL") do
case Integer.parse(System.get_env("ADDRESS_WITH_BALANCES_UPDATE_INTERVAL")) do case Integer.parse(System.get_env("ADDRESS_WITH_BALANCES_UPDATE_INTERVAL")) do
@ -49,6 +47,11 @@ balances_update_interval =
end end
config :explorer, Explorer.Counters.AddressesWithBalanceCounter, config :explorer, Explorer.Counters.AddressesWithBalanceCounter,
enabled: false,
enable_consolidation: true,
update_interval_in_seconds: balances_update_interval || 30 * 60
config :explorer, Explorer.Counters.AddressesCounter,
enabled: true, enabled: true,
enable_consolidation: true, enable_consolidation: true,
update_interval_in_seconds: balances_update_interval || 30 * 60 update_interval_in_seconds: balances_update_interval || 30 * 60
@ -138,6 +141,10 @@ config :explorer, Explorer.Chain.Cache.Transactions,
ttl_check_interval: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(1), else: false), ttl_check_interval: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(1), else: false),
global_ttl: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(5)) global_ttl: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(5))
config :explorer, Explorer.Chain.Cache.Accounts,
ttl_check_interval: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(1), else: false),
global_ttl: if(System.get_env("DISABLE_INDEXER") == "true", do: :timer.seconds(5))
# Import environment specific config. This must remain at the bottom # Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above. # of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs" import_config "#{Mix.env()}.exs"

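Two points in the config hunks above: `UNCLES_IN_AVERAGE_BLOCK_TIME` flips from opt-out to opt-in, so uncles only count toward the average block time when the variable is explicitly "true", and the new `Explorer.Counters.AddressesCounter` / `Explorer.Chain.Cache.Accounts` entries mirror the existing counter and cache settings. A quick truth-table sketch of the flipped default (illustrative, not project code):

    include_uncles? = fn env -> if env == "true", do: true, else: false end

    include_uncles?.(nil)     # => false (was true before this change)
    include_uncles?.("false") # => false
    include_uncles?.("true")  # => true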
@ -21,6 +21,8 @@ config :explorer, Explorer.Counters.AverageBlockTime, enabled: false
config :explorer, Explorer.Counters.AddressesWithBalanceCounter, enabled: false, enable_consolidation: false config :explorer, Explorer.Counters.AddressesWithBalanceCounter, enabled: false, enable_consolidation: false
config :explorer, Explorer.Counters.AddressesCounter, enabled: false, enable_consolidation: false
config :explorer, Explorer.Market.History.Cataloger, enabled: false config :explorer, Explorer.Market.History.Cataloger, enabled: false
config :explorer, Explorer.Tracer, disabled?: false config :explorer, Explorer.Tracer, disabled?: false

@ -8,6 +8,7 @@ defmodule Explorer.Application do
alias Explorer.Admin alias Explorer.Admin
alias Explorer.Chain.Cache.{ alias Explorer.Chain.Cache.{
Accounts,
BlockCount, BlockCount,
BlockNumber, BlockNumber,
Blocks, Blocks,
@ -49,7 +50,8 @@ defmodule Explorer.Application do
BlockNumber, BlockNumber,
con_cache_child_spec(MarketHistoryCache.cache_name()), con_cache_child_spec(MarketHistoryCache.cache_name()),
con_cache_child_spec(RSK.cache_name(), ttl_check_interval: :timer.minutes(1), global_ttl: :timer.minutes(30)), con_cache_child_spec(RSK.cache_name(), ttl_check_interval: :timer.minutes(1), global_ttl: :timer.minutes(30)),
Transactions Transactions,
Accounts
] ]
children = base_children ++ configurable_children() children = base_children ++ configurable_children()
@ -66,6 +68,7 @@ defmodule Explorer.Application do
configure(Explorer.KnownTokens), configure(Explorer.KnownTokens),
configure(Explorer.Market.History.Cataloger), configure(Explorer.Market.History.Cataloger),
configure(Explorer.Counters.AddressesWithBalanceCounter), configure(Explorer.Counters.AddressesWithBalanceCounter),
configure(Explorer.Counters.AddressesCounter),
configure(Explorer.Counters.AverageBlockTime), configure(Explorer.Counters.AverageBlockTime),
configure(Explorer.Validator.MetadataProcessor), configure(Explorer.Validator.MetadataProcessor),
configure(Explorer.Staking.EpochCounter) configure(Explorer.Staking.EpochCounter)

@ -8,6 +8,7 @@ defmodule Explorer.Chain do
from: 2, from: 2,
join: 4, join: 4,
limit: 2, limit: 2,
lock: 2,
order_by: 2, order_by: 2,
order_by: 3, order_by: 3,
offset: 2, offset: 2,
@ -48,6 +49,7 @@ defmodule Explorer.Chain do
alias Explorer.Chain.Block.{EmissionReward, Reward} alias Explorer.Chain.Block.{EmissionReward, Reward}
alias Explorer.Chain.Cache.{ alias Explorer.Chain.Cache.{
Accounts,
BlockCount, BlockCount,
BlockNumber, BlockNumber,
Blocks, Blocks,
@ -56,7 +58,7 @@ defmodule Explorer.Chain do
} }
alias Explorer.Chain.Import.Runner alias Explorer.Chain.Import.Runner
alias Explorer.Counters.AddressesWithBalanceCounter alias Explorer.Counters.{AddressesCounter, AddressesWithBalanceCounter}
alias Explorer.Market.MarketHistoryCache alias Explorer.Market.MarketHistoryCache
alias Explorer.{PagingOptions, Repo} alias Explorer.{PagingOptions, Repo}
@ -116,6 +118,14 @@ defmodule Explorer.Chain do
AddressesWithBalanceCounter.fetch() AddressesWithBalanceCounter.fetch()
end end
@doc """
Gets the count of all `t:Explorer.Chain.Address.t/0`s from the cache.
"""
@spec count_addresses_from_cache :: non_neg_integer()
def count_addresses_from_cache do
AddressesCounter.fetch()
end
@doc """ @doc """
Counts the number of addresses with fetched coin balance > 0. Counts the number of addresses with fetched coin balance > 0.
@ -129,6 +139,19 @@ defmodule Explorer.Chain do
) )
end end
@doc """
Counts the number of all addresses.
This function should be used with caution. On larger databases it may take a
while to return.
"""
def count_addresses do
Repo.one(
Address.count(),
timeout: :infinity
)
end
@doc """ @doc """
`t:Explorer.Chain.InternalTransaction/0`s from the address with the given `hash`. `t:Explorer.Chain.InternalTransaction/0`s from the address with the given `hash`.
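A hypothetical call site for the new counter (only `count_addresses_from_cache/0` and `count_addresses/0` come from the diff; the surrounding context is illustrative):

    # e.g. when rendering dashboard totals, use the cheap cached value
    address_count = Explorer.Chain.count_addresses_from_cache()

    # reserve the exact COUNT(*) for jobs that can tolerate a long-running query
    exact_count = Explorer.Chain.count_addresses()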
@ -594,12 +617,15 @@ defmodule Explorer.Chain do
def create_decompiled_smart_contract(attrs) do def create_decompiled_smart_contract(attrs) do
changeset = DecompiledSmartContract.changeset(%DecompiledSmartContract{}, attrs) changeset = DecompiledSmartContract.changeset(%DecompiledSmartContract{}, attrs)
# Enforce ShareLocks tables order (see docs: sharelocks.md)
Multi.new() Multi.new()
|> Multi.run(:set_address_decompiled, fn repo, _ ->
set_address_decompiled(repo, Changeset.get_field(changeset, :address_hash))
end)
|> Multi.insert(:decompiled_smart_contract, changeset, |> Multi.insert(:decompiled_smart_contract, changeset,
on_conflict: :replace_all, on_conflict: :replace_all,
conflict_target: [:decompiler_version, :address_hash] conflict_target: [:decompiler_version, :address_hash]
) )
|> Multi.run(:set_address_decompiled, &set_address_decompiled/2)
|> Repo.transaction() |> Repo.transaction()
|> case do |> case do
{:ok, %{decompiled_smart_contract: decompiled_smart_contract}} -> {:ok, decompiled_smart_contract} {:ok, %{decompiled_smart_contract: decompiled_smart_contract}} -> {:ok, decompiled_smart_contract}
@ -1375,6 +1401,36 @@ defmodule Explorer.Chain do
def list_top_addresses(options \\ []) do def list_top_addresses(options \\ []) do
paging_options = Keyword.get(options, :paging_options, @default_paging_options) paging_options = Keyword.get(options, :paging_options, @default_paging_options)
if is_nil(paging_options.key) do
paging_options.page_size
|> Accounts.take_enough()
|> case do
nil ->
accounts_with_n = fetch_top_addresses(paging_options)
accounts_with_n
|> Enum.map(fn {address, _n} -> address end)
|> Accounts.update()
accounts_with_n
accounts ->
Enum.map(
accounts,
&{&1,
if is_nil(&1.nonce) do
0
else
&1.nonce + 1
end}
)
end
else
fetch_top_addresses(paging_options)
end
end
defp fetch_top_addresses(paging_options) do
base_query = base_query =
from(a in Address, from(a in Address,
where: a.fetched_coin_balance > ^0, where: a.fetched_coin_balance > ^0,
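The new branch in `list_top_addresses/1` is a read-through cache for the first page: `Accounts.take_enough/1` serves the request when the cache holds enough entries, otherwise the DB query runs and its result refills the cache; cached addresses get their transaction count estimated as `nonce + 1` (or `0` when the nonce is unknown). A condensed sketch of the pattern, assuming it sits next to `fetch_top_addresses/1` (the helper name `fetch_top_addresses_from_db/1` is illustrative):

    defp first_page_of_top_addresses(page_size) do
      case Explorer.Chain.Cache.Accounts.take_enough(page_size) do
        nil ->
          addresses_with_tx_count = fetch_top_addresses_from_db(page_size)

          addresses_with_tx_count
          |> Enum.map(fn {address, _tx_count} -> address end)
          |> Explorer.Chain.Cache.Accounts.update()

          addresses_with_tx_count

        cached_addresses ->
          Enum.map(cached_addresses, &{&1, if(&1.nonce, do: &1.nonce + 1, else: 0)})
      end
    end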
@ -2518,12 +2574,18 @@ defmodule Explorer.Chain do
|> SmartContract.changeset(attrs) |> SmartContract.changeset(attrs)
|> Changeset.put_change(:external_libraries, external_libraries) |> Changeset.put_change(:external_libraries, external_libraries)
address_hash = Changeset.get_field(smart_contract_changeset, :address_hash)
# Enforce ShareLocks tables order (see docs: sharelocks.md)
insert_result = insert_result =
Multi.new() Multi.new()
|> Multi.run(:set_address_verified, fn repo, _ -> set_address_verified(repo, address_hash) end)
|> Multi.run(:clear_primary_address_names, fn repo, _ -> clear_primary_address_names(repo, address_hash) end)
|> Multi.run(:insert_address_name, fn repo, _ ->
name = Changeset.get_field(smart_contract_changeset, :name)
create_address_name(repo, name, address_hash)
end)
|> Multi.insert(:smart_contract, smart_contract_changeset) |> Multi.insert(:smart_contract, smart_contract_changeset)
|> Multi.run(:clear_primary_address_names, &clear_primary_address_names/2)
|> Multi.run(:insert_address_name, &create_address_name/2)
|> Multi.run(:set_address_verified, &set_address_verified/2)
|> Repo.transaction() |> Repo.transaction()
case insert_result do case insert_result do
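The reshuffle above is about table-level lock ordering: the steps that write to `addresses` and `address_names` now run before the `smart_contracts` insert, so concurrent imports always acquire ShareLocks in the order documented in sharelocks.md, and the helpers receive the `address_hash` taken from the changeset instead of digging it out of a previous step's result. The shape of the change reduced to step names (toy Multi; nothing runs until `Repo.transaction/1`):

    alias Ecto.Multi

    Multi.new()
    |> Multi.run(:set_address_verified, fn _repo, _ -> {:ok, :addresses} end)
    |> Multi.run(:clear_primary_address_names, fn _repo, _ -> {:ok, :address_names} end)
    |> Multi.run(:insert_address_name, fn _repo, _ -> {:ok, :address_names} end)
    |> Multi.run(:smart_contract, fn _repo, _ -> {:ok, :smart_contracts} end)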
@ -2538,7 +2600,7 @@ defmodule Explorer.Chain do
end end
end end
defp set_address_verified(repo, %{smart_contract: %SmartContract{address_hash: address_hash}}) do defp set_address_verified(repo, address_hash) do
query = query =
from( from(
address in Address, address in Address,
@ -2551,7 +2613,7 @@ defmodule Explorer.Chain do
end end
end end
defp set_address_decompiled(repo, %{decompiled_smart_contract: %DecompiledSmartContract{address_hash: address_hash}}) do defp set_address_decompiled(repo, address_hash) do
query = query =
from( from(
address in Address, address in Address,
@ -2560,24 +2622,29 @@ defmodule Explorer.Chain do
case repo.update_all(query, set: [decompiled: true]) do case repo.update_all(query, set: [decompiled: true]) do
{1, _} -> {:ok, []} {1, _} -> {:ok, []}
_ -> {:error, "There was an error annotating that the address has been verified."} _ -> {:error, "There was an error annotating that the address has been decompiled."}
end end
end end
defp clear_primary_address_names(repo, %{smart_contract: %SmartContract{address_hash: address_hash}}) do defp clear_primary_address_names(repo, address_hash) do
clear_primary_query = query =
from( from(
address_name in Address.Name, address_name in Address.Name,
where: address_name.address_hash == ^address_hash, where: address_name.address_hash == ^address_hash,
update: [set: [primary: false]] # Enforce Name ShareLocks order (see docs: sharelocks.md)
order_by: [asc: :address_hash, asc: :name],
lock: "FOR UPDATE"
) )
repo.update_all(clear_primary_query, []) repo.update_all(
from(n in Address.Name, join: s in subquery(query), on: n.address_hash == s.address_hash),
set: [primary: false]
)
{:ok, []} {:ok, []}
end end
defp create_address_name(repo, %{smart_contract: %SmartContract{name: name, address_hash: address_hash}}) do defp create_address_name(repo, name, address_hash) do
params = %{ params = %{
address_hash: address_hash, address_hash: address_hash,
name: name, name: name,
@ -2924,14 +2991,20 @@ defmodule Explorer.Chain do
]) :: {integer(), nil | [term()]} ]) :: {integer(), nil | [term()]}
def find_and_update_replaced_transactions(transactions, timeout \\ :infinity) do def find_and_update_replaced_transactions(transactions, timeout \\ :infinity) do
query = query =
Enum.reduce(transactions, Transaction, fn %{hash: hash, nonce: nonce, from_address_hash: from_address_hash}, transactions
query -> |> Enum.reduce(
from(t in query, Transaction,
or_where: fn %{hash: hash, nonce: nonce, from_address_hash: from_address_hash}, query ->
t.nonce == ^nonce and t.from_address_hash == ^from_address_hash and t.hash != ^hash and from(t in query,
not is_nil(t.block_number) or_where:
) t.nonce == ^nonce and t.from_address_hash == ^from_address_hash and t.hash != ^hash and
end) not is_nil(t.block_number)
)
end
)
# Enforce Transaction ShareLocks order (see docs: sharelocks.md)
|> order_by(asc: :hash)
|> lock("FOR UPDATE")
hashes = Enum.map(transactions, & &1.hash) hashes = Enum.map(transactions, & &1.hash)
@ -2974,10 +3047,15 @@ defmodule Explorer.Chain do
or_where: t.nonce == ^nonce and t.from_address_hash == ^from_address and is_nil(t.block_hash) or_where: t.nonce == ^nonce and t.from_address_hash == ^from_address and is_nil(t.block_hash)
) )
end) end)
# Enforce Transaction ShareLocks order (see docs: sharelocks.md)
update_query = from(t in query, update: [set: [status: ^:error, error: "dropped/replaced"]]) |> order_by(asc: :hash)
|> lock("FOR UPDATE")
Repo.update_all(update_query, [], timeout: timeout)
Repo.update_all(
from(t in Transaction, join: s in subquery(query), on: t.hash == s.hash),
[set: [error: "dropped/replaced", status: :error]],
timeout: timeout
)
end end
end end
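Both rewrites above use the recipe that recurs throughout the rest of this diff: a first query selects the target rows with a deterministic `order_by` plus `lock: "FOR UPDATE"`, then the real `update_all`/`delete_all` joins on that subquery. A generic sketch against the project's `Transaction` schema (the queries are only built here, not executed):

    import Ecto.Query

    alias Explorer.Chain.Transaction

    ordered_lock_query =
      from(t in Transaction,
        where: is_nil(t.block_hash),
        select: t.hash,
        # row ShareLocks are grabbed in a deterministic order
        order_by: [asc: :hash],
        lock: "FOR UPDATE"
      )

    update_query =
      from(t in Transaction,
        join: s in subquery(ordered_lock_query),
        on: t.hash == s.hash
      )

    # Repo.update_all(update_query, [set: [error: "dropped/replaced", status: :error]], timeout: timeout)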
@ -3004,8 +3082,15 @@ defmodule Explorer.Chain do
address_name_opts = [on_conflict: :nothing, conflict_target: [:address_hash, :name]] address_name_opts = [on_conflict: :nothing, conflict_target: [:address_hash, :name]]
# Enforce ShareLocks tables order (see docs: sharelocks.md)
insert_result = insert_result =
Multi.new() Multi.new()
|> Multi.run(
:address_name,
fn repo, _ ->
{:ok, repo.insert(address_name_changeset, address_name_opts)}
end
)
|> Multi.run(:token, fn repo, _ -> |> Multi.run(:token, fn repo, _ ->
with {:error, %Changeset{errors: [{^stale_error_field, {^stale_error_message, []}}]}} <- with {:error, %Changeset{errors: [{^stale_error_field, {^stale_error_message, []}}]}} <-
repo.insert(token_changeset, token_opts) do repo.insert(token_changeset, token_opts) do
@ -3013,12 +3098,6 @@ defmodule Explorer.Chain do
{:ok, token} {:ok, token}
end end
end) end)
|> Multi.run(
:address_name,
fn repo, _ ->
{:ok, repo.insert(address_name_changeset, address_name_opts)}
end
)
|> Repo.transaction() |> Repo.transaction()
case insert_result do case insert_result do
@ -3070,9 +3149,17 @@ defmodule Explorer.Chain do
address_hash address_hash
|> CoinBalance.balances_by_day(latest_block_timestamp) |> CoinBalance.balances_by_day(latest_block_timestamp)
|> Repo.all() |> Repo.all()
|> replace_last_value(latest_block_timestamp)
|> normalize_balances_by_day() |> normalize_balances_by_day()
end end
# https://github.com/poanetwork/blockscout/issues/2658
defp replace_last_value(items, %{value: value, timestamp: timestamp}) do
List.replace_at(items, -1, %{date: Date.convert!(timestamp, Calendar.ISO), value: value})
end
defp replace_last_value(items, _), do: items
defp normalize_balances_by_day(balances_by_day) do defp normalize_balances_by_day(balances_by_day) do
result = result =
balances_by_day balances_by_day

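The `replace_last_value/2` addition (issue 2658) makes the daily-balance series used for the address chart end with the address's actual latest balance, which is why the `Address.CoinBalance` hunk further down now selects `value` alongside `timestamp`. A small data-only illustration (sample values; the real code converts the block timestamp to a date):

    balances_by_day = [
      %{date: ~D[2019-09-01], value: 10},
      %{date: ~D[2019-09-02], value: 12}
    ]

    latest = %{value: 15, date: ~D[2019-09-02]}

    List.replace_at(balances_by_day, -1, %{date: latest.date, value: latest.value})
    # => [%{date: ~D[2019-09-01], value: 10}, %{date: ~D[2019-09-02], value: 15}]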
@ -237,6 +237,16 @@ defmodule Explorer.Chain.Address do
) )
end end
@doc """
Counts all the addresses.
"""
def count do
from(
a in Address,
select: fragment("COUNT(*)")
)
end
defimpl String.Chars do defimpl String.Chars do
@doc """ @doc """
Uses `hash` as string representation, formatting it according to the eip-55 specification Uses `hash` as string representation, formatting it according to the eip-55 specification

@ -112,7 +112,7 @@ defmodule Explorer.Chain.Address.CoinBalance do
|> join(:inner, [cb], b in Block, on: cb.block_number == b.number) |> join(:inner, [cb], b in Block, on: cb.block_number == b.number)
|> where([cb], cb.address_hash == ^address_hash) |> where([cb], cb.address_hash == ^address_hash)
|> last(:block_number) |> last(:block_number)
|> select([cb, b], %{timestamp: b.timestamp}) |> select([cb, b], %{timestamp: b.timestamp, value: cb.value})
end end
def changeset(%__MODULE__{} = balance, params) do def changeset(%__MODULE__{} = balance, params) do

@ -0,0 +1,73 @@
defmodule Explorer.Chain.Cache.Accounts do
@moduledoc """
Caches the top Addresses
"""
alias Explorer.Chain.Address
use Explorer.Chain.OrderedCache,
name: :accounts,
max_size: 51,
preload: :names,
ttl_check_interval: Application.get_env(:explorer, __MODULE__)[:ttl_check_interval],
global_ttl: Application.get_env(:explorer, __MODULE__)[:global_ttl]
@type element :: Address.t()
@type id :: {non_neg_integer(), non_neg_integer()}
def element_to_id(%Address{fetched_coin_balance: fetched_coin_balance, hash: hash}) do
{fetched_coin_balance, hash}
end
def prevails?({fetched_coin_balance_a, hash_a}, {fetched_coin_balance_b, hash_b}) do
# same as a query's `order_by: [desc: :fetched_coin_balance, asc: :hash]`
if fetched_coin_balance_a == fetched_coin_balance_b do
hash_a < hash_b
else
fetched_coin_balance_a > fetched_coin_balance_b
end
end
def drop(nil), do: :ok
def drop([]), do: :ok
def drop(addresses) when is_list(addresses) do
# This has to be used by the Indexer instead of `update`.
# The reason is that addresses already in the cache can change their balance
# value, and removing or updating them would leave the cache in a potentially
# invalid state that would not correct itself over time.
# The only thing we can safely do when an address in the cache changes its
# `fetched_coin_balance` is to invalidate the whole cache and wait for it
# to be filled again (by the query it stands in for when full).
ConCache.update(cache_name(), ids_list_key(), fn ids ->
if drop_needed?(ids, addresses) do
# Remove the addresses immediately
Enum.each(ids, &ConCache.delete(cache_name(), &1))
{:ok, []}
else
{:ok, ids}
end
end)
end
def drop(address), do: drop([address])
defp drop_needed?(ids, _addresses) when is_nil(ids), do: false
defp drop_needed?([], _addresses), do: false
defp drop_needed?(ids, addresses) do
ids_map = Map.new(ids, fn {balance, hash} -> {hash, balance} end)
# The result is `true` only when the address is already present in the cache,
# but with a different `fetched_coin_balance`
Enum.find_value(addresses, false, fn address ->
stored_address_balance = Map.get(ids_map, address.hash)
stored_address_balance && stored_address_balance != address.fetched_coin_balance
end)
end
end

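`prevails?/2` is the cache-side analogue of the query ordering `desc: :fetched_coin_balance, asc: :hash`, and `drop/1` deliberately invalidates the whole cache whenever a cached address changes balance, since partially updating an ordered cache could leave it permanently wrong. A quick check of the comparator with illustrative values:

    prevails? = fn {balance_a, hash_a}, {balance_b, hash_b} ->
      if balance_a == balance_b, do: hash_a < hash_b, else: balance_a > balance_b
    end

    prevails?.({100, "0xaa"}, {50, "0xbb"})   # => true  (higher balance wins)
    prevails?.({100, "0xbb"}, {100, "0xaa"})  # => false (tie: lower hash wins)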
@ -46,7 +46,10 @@ defmodule Explorer.Chain.ContractMethod do
end) end)
end end
Repo.insert_all(__MODULE__, successes, on_conflict: :nothing, conflict_target: [:identifier, :abi]) # Enforce ContractMethod ShareLocks order (see docs: sharelocks.md)
ordered_successes = Enum.sort_by(successes, &{&1.identifier, &1.abi})
Repo.insert_all(__MODULE__, ordered_successes, on_conflict: :nothing, conflict_target: [:identifier, :abi])
end end
def import_all do def import_all do

@ -10,7 +10,9 @@ defmodule Explorer.Chain.Import do
@stages [ @stages [
Import.Stage.Addresses, Import.Stage.Addresses,
Import.Stage.AddressReferencing Import.Stage.AddressReferencing,
Import.Stage.BlockReferencing,
Import.Stage.BlockFollowing
] ]
# in order so that foreign keys are inserted before being referenced # in order so that foreign keys are inserted before being referenced

@ -71,7 +71,7 @@ defmodule Explorer.Chain.Import.Runner.Address.CoinBalances do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce CoinBalance ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.block_number}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.block_number})
{:ok, _} = {:ok, _} =

@ -106,12 +106,14 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do
|> Map.put_new(:timeout, @timeout) |> Map.put_new(:timeout, @timeout)
|> Map.put(:timestamps, timestamps) |> Map.put(:timestamps, timestamps)
# order so that row ShareLocks are grabbed in a consistent order # Enforce ShareLocks tables order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.token_contract_address_hash})
multi multi
|> Multi.run(:acquire_contract_address_tokens, fn repo, _ ->
contract_address_hashes = changes_list |> Enum.map(& &1.token_contract_address_hash) |> Enum.uniq()
Tokens.acquire_contract_address_tokens(repo, contract_address_hashes)
end)
|> Multi.run(:address_current_token_balances, fn repo, _ -> |> Multi.run(:address_current_token_balances, fn repo, _ ->
insert(repo, ordered_changes_list, insert_options) insert(repo, changes_list, insert_options)
end) end)
|> Multi.run(:address_current_token_balances_update_token_holder_counts, fn repo, |> Multi.run(:address_current_token_balances_update_token_holder_counts, fn repo,
%{ %{
@ -120,6 +122,7 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do
} -> } ->
token_holder_count_deltas = upserted_balances_to_holder_count_deltas(upserted_balances) token_holder_count_deltas = upserted_balances_to_holder_count_deltas(upserted_balances)
# ShareLocks order already enforced by `acquire_contract_address_tokens` (see docs: sharelocks.md)
Tokens.update_holder_counts_with_deltas( Tokens.update_holder_counts_with_deltas(
repo, repo,
token_holder_count_deltas, token_holder_count_deltas,
@ -193,10 +196,13 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do
}) :: }) ::
{:ok, [CurrentTokenBalance.t()]} {:ok, [CurrentTokenBalance.t()]}
| {:error, [Changeset.t()]} | {:error, [Changeset.t()]}
defp insert(repo, ordered_changes_list, %{timeout: timeout, timestamps: timestamps} = options) defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options)
when is_atom(repo) and is_list(ordered_changes_list) do when is_atom(repo) and is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# Enforce CurrentTokenBalance ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.token_contract_address_hash})
Import.insert_changes_list( Import.insert_changes_list(
repo, repo,
ordered_changes_list, ordered_changes_list,

@ -59,7 +59,7 @@ defmodule Explorer.Chain.Import.Runner.Address.TokenBalances do
def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce TokenBalance ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = ordered_changes_list =
Enum.sort_by(changes_list, &{&1.address_hash, &1.token_contract_address_hash, &1.block_number}) Enum.sort_by(changes_list, &{&1.address_hash, &1.token_contract_address_hash, &1.block_number})

@ -80,7 +80,7 @@ defmodule Explorer.Chain.Import.Runner.Addresses do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Address ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = sort_changes_list(changes_list) ordered_changes_list = sort_changes_list(changes_list)
Import.insert_changes_list( Import.insert_changes_list(
@ -104,13 +104,15 @@ defmodule Explorer.Chain.Import.Runner.Addresses do
fetched_coin_balance: fetched_coin_balance:
fragment( fragment(
""" """
CASE WHEN EXCLUDED.fetched_coin_balance_block_number IS NOT NULL AND CASE WHEN EXCLUDED.fetched_coin_balance_block_number IS NOT NULL
(? IS NULL OR AND EXCLUDED.fetched_coin_balance IS NOT NULL AND
(? IS NULL OR ? IS NULL OR
EXCLUDED.fetched_coin_balance_block_number >= ?) THEN EXCLUDED.fetched_coin_balance_block_number >= ?) THEN
EXCLUDED.fetched_coin_balance EXCLUDED.fetched_coin_balance
ELSE ? ELSE ?
END END
""", """,
address.fetched_coin_balance,
address.fetched_coin_balance_block_number, address.fetched_coin_balance_block_number,
address.fetched_coin_balance_block_number, address.fetched_coin_balance_block_number,
address.fetched_coin_balance address.fetched_coin_balance
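The upsert rule for `fetched_coin_balance` gains two checks: the incoming (`EXCLUDED`) balance must itself be non-NULL, and a stored row whose balance is still NULL always accepts the incoming value. Restated as a hypothetical Elixir helper (`current` is the stored row, `excluded` the incoming one; field names as in the fragment):

    defmodule BalanceUpsertRule do
      # keep the incoming balance only when it is present and not older than the stored one
      def resolve(current, excluded) do
        if excluded.fetched_coin_balance_block_number != nil and
             excluded.fetched_coin_balance != nil and
             (current.fetched_coin_balance == nil or
                current.fetched_coin_balance_block_number == nil or
                excluded.fetched_coin_balance_block_number >= current.fetched_coin_balance_block_number) do
          excluded.fetched_coin_balance
        else
          current.fetched_coin_balance
        end
      end
    end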
@ -153,13 +155,18 @@ defmodule Explorer.Chain.Import.Runner.Addresses do
query = query =
from(t in Transaction, from(t in Transaction,
where: t.created_contract_address_hash in ^ordered_created_contract_hashes, where: t.created_contract_address_hash in ^ordered_created_contract_hashes,
update: [ # Enforce Transaction ShareLocks order (see docs: sharelocks.md)
set: [created_contract_code_indexed_at: ^timestamps.updated_at] order_by: t.hash,
] lock: "FOR UPDATE"
) )
try do try do
{_, result} = repo.update_all(query, [], timeout: timeout) {_, result} =
repo.update_all(
from(t in Transaction, join: s in subquery(query), on: t.hash == s.hash),
[set: [created_contract_code_indexed_at: timestamps.updated_at]],
timeout: timeout
)
{:ok, result} {:ok, result}
rescue rescue

@ -52,7 +52,7 @@ defmodule Explorer.Chain.Import.Runner.Block.Rewards do
when is_list(changes_list) do when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Reward ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.address_type, &1.block_hash}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.address_hash, &1.address_type, &1.block_hash})
Import.insert_changes_list( Import.insert_changes_list(

@ -62,7 +62,7 @@ defmodule Explorer.Chain.Import.Runner.Block.SecondDegreeRelations do
defp insert(repo, changes_list, %{timeout: timeout} = options) when is_atom(repo) and is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout} = options) when is_atom(repo) and is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce SecondDegreeRelation ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.nephew_hash, &1.uncle_hash}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.nephew_hash, &1.uncle_hash})
Import.insert_changes_list(repo, ordered_changes_list, Import.insert_changes_list(repo, ordered_changes_list,

@ -5,11 +5,10 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
require Ecto.Query require Ecto.Query
import Ecto.Query, only: [from: 2, select: 2, subquery: 1, update: 2] import Ecto.Query, only: [from: 2, subquery: 1]
alias Ecto.Adapters.SQL
alias Ecto.{Changeset, Multi, Repo} alias Ecto.{Changeset, Multi, Repo}
alias Explorer.Chain.{Address, Block, Hash, Import, InternalTransaction, Transaction} alias Explorer.Chain.{Address, Block, Import, InternalTransaction, Log, TokenTransfer, Transaction}
alias Explorer.Chain.Block.Reward alias Explorer.Chain.Block.Reward
alias Explorer.Chain.Import.Runner alias Explorer.Chain.Import.Runner
alias Explorer.Chain.Import.Runner.Address.CurrentTokenBalances alias Explorer.Chain.Import.Runner.Address.CurrentTokenBalances
@ -45,39 +44,68 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
|> Map.put_new(:timeout, @timeout) |> Map.put_new(:timeout, @timeout)
|> Map.put(:timestamps, timestamps) |> Map.put(:timestamps, timestamps)
ordered_consensus_block_numbers = ordered_consensus_block_numbers(changes_list) hashes = Enum.map(changes_list, & &1.hash)
where_invalid_neighbour = where_invalid_neighbour(changes_list) consensus_block_numbers = consensus_block_numbers(changes_list)
where_forked = where_forked(changes_list)
# Enforce ShareLocks tables order (see docs: sharelocks.md)
multi multi
|> Multi.run(:derive_transaction_forks, fn repo, _ -> |> Multi.run(:lose_consensus, fn repo, _ ->
derive_transaction_forks(%{ lose_consensus(repo, hashes, consensus_block_numbers, changes_list, insert_options)
repo: repo, end)
timeout: options[Runner.Transaction.Forks.option_key()][:timeout] || Runner.Transaction.Forks.timeout(), |> Multi.run(:blocks, fn repo, _ ->
timestamps: timestamps, # Note, needs to be executed after `lose_consensus` for lock acquisition
where_forked: where_forked insert(repo, changes_list, insert_options)
end)
|> Multi.run(:uncle_fetched_block_second_degree_relations, fn repo, %{blocks: blocks} when is_list(blocks) ->
update_block_second_degree_relations(repo, hashes, %{
timeout:
options[Runner.Block.SecondDegreeRelations.option_key()][:timeout] ||
Runner.Block.SecondDegreeRelations.timeout(),
timestamps: timestamps
}) })
end) end)
# MUST be after `:derive_transaction_forks`, which depends on values in `transactions` table |> Multi.run(:delete_rewards, fn repo, _ ->
delete_rewards(repo, changes_list, insert_options)
end)
|> Multi.run(:fork_transactions, fn repo, _ -> |> Multi.run(:fork_transactions, fn repo, _ ->
fork_transactions(%{ fork_transactions(%{
repo: repo, repo: repo,
timeout: options[Runner.Transactions.option_key()][:timeout] || Runner.Transactions.timeout(), timeout: options[Runner.Transactions.option_key()][:timeout] || Runner.Transactions.timeout(),
timestamps: timestamps, timestamps: timestamps,
where_forked: where_forked blocks_changes: changes_list
}) })
end) end)
|> Multi.run(:lose_consensus, fn repo, _ -> |> Multi.run(:derive_transaction_forks, fn repo, %{fork_transactions: transactions} ->
lose_consensus(repo, ordered_consensus_block_numbers, insert_options) derive_transaction_forks(%{
repo: repo,
timeout: options[Runner.Transaction.Forks.option_key()][:timeout] || Runner.Transaction.Forks.timeout(),
timestamps: timestamps,
transactions: transactions
})
end)
|> Multi.run(:remove_nonconsensus_logs, fn repo, %{derive_transaction_forks: transactions} ->
remove_nonconsensus_logs(repo, transactions, insert_options)
end)
|> Multi.run(:acquire_internal_transactions, fn repo, %{derive_transaction_forks: transactions} ->
acquire_internal_transactions(repo, hashes, transactions)
end) end)
|> Multi.run(:lose_invalid_neighbour_consensus, fn repo, _ -> |> Multi.run(:remove_nonconsensus_internal_transactions, fn repo, %{derive_transaction_forks: transactions} ->
lose_invalid_neighbour_consensus(repo, where_invalid_neighbour, insert_options) remove_nonconsensus_internal_transactions(repo, transactions, insert_options)
end)
|> Multi.run(:internal_transaction_transaction_block_number, fn repo, _ ->
update_internal_transaction_block_number(repo, hashes)
end)
|> Multi.run(:acquire_contract_address_tokens, fn repo, _ ->
acquire_contract_address_tokens(repo, consensus_block_numbers)
end)
|> Multi.run(:remove_nonconsensus_token_transfers, fn repo, %{derive_transaction_forks: transactions} ->
remove_nonconsensus_token_transfers(repo, transactions, insert_options)
end) end)
|> Multi.run(:delete_address_token_balances, fn repo, _ -> |> Multi.run(:delete_address_token_balances, fn repo, _ ->
delete_address_token_balances(repo, ordered_consensus_block_numbers, insert_options) delete_address_token_balances(repo, consensus_block_numbers, insert_options)
end) end)
|> Multi.run(:delete_address_current_token_balances, fn repo, _ -> |> Multi.run(:delete_address_current_token_balances, fn repo, _ ->
delete_address_current_token_balances(repo, ordered_consensus_block_numbers, insert_options) delete_address_current_token_balances(repo, consensus_block_numbers, insert_options)
end) end)
|> Multi.run(:derive_address_current_token_balances, fn repo, |> Multi.run(:derive_address_current_token_balances, fn repo,
%{ %{
@ -94,130 +122,131 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
deltas = CurrentTokenBalances.token_holder_count_deltas(%{deleted: deleted, inserted: inserted}) deltas = CurrentTokenBalances.token_holder_count_deltas(%{deleted: deleted, inserted: inserted})
Tokens.update_holder_counts_with_deltas(repo, deltas, insert_options) Tokens.update_holder_counts_with_deltas(repo, deltas, insert_options)
end) end)
|> Multi.run(:delete_rewards, fn repo, _ ->
delete_rewards(repo, changes_list, insert_options)
end)
|> Multi.run(:blocks, fn repo, _ ->
insert(repo, changes_list, insert_options)
end)
|> Multi.run(:uncle_fetched_block_second_degree_relations, fn repo, %{blocks: blocks} when is_list(blocks) ->
update_block_second_degree_relations(
repo,
blocks,
%{
timeout:
options[Runner.Block.SecondDegreeRelations.option_key()][:timeout] ||
Runner.Block.SecondDegreeRelations.timeout(),
timestamps: timestamps
}
)
end)
|> Multi.run(
:internal_transaction_transaction_block_number,
fn repo, %{blocks: blocks} ->
blocks_hashes = Enum.map(blocks, & &1.hash)
query =
from(
internal_transaction in InternalTransaction,
join: transaction in Transaction,
on: internal_transaction.transaction_hash == transaction.hash,
join: block in Block,
on: block.hash == transaction.block_hash,
where: block.hash in ^blocks_hashes,
update: [
set: [
block_number: block.number
]
]
)
{total, _} = repo.update_all(query, [])
{:ok, total}
end
)
end end
@impl Runner @impl Runner
def timeout, do: @timeout def timeout, do: @timeout
# sobelow_skip ["SQL.Query"] defp acquire_contract_address_tokens(repo, consensus_block_numbers) do
defp derive_transaction_forks(%{
repo: repo,
timeout: timeout,
timestamps: %{inserted_at: inserted_at, updated_at: updated_at},
where_forked: where_forked
}) do
query = query =
from(transaction in where_forked, from(address_current_token_balance in Address.CurrentTokenBalance,
select: [ where: address_current_token_balance.block_number in ^consensus_block_numbers,
transaction.block_hash, select: address_current_token_balance.token_contract_address_hash
transaction.index, )
transaction.hash,
type(^inserted_at, transaction.inserted_at), contract_address_hashes = repo.all(query)
type(^updated_at, transaction.updated_at)
Tokens.acquire_contract_address_tokens(repo, contract_address_hashes)
end
defp acquire_internal_transactions(repo, hashes, forked_transaction_hashes) do
query =
from(internal_transaction in InternalTransaction,
join: transaction in Transaction,
on: internal_transaction.transaction_hash == transaction.hash,
where: transaction.block_hash in ^hashes,
or_where: transaction.hash in ^forked_transaction_hashes,
select: {internal_transaction.transaction_hash, internal_transaction.index},
# Enforce InternalTransaction ShareLocks order (see docs: sharelocks.md)
order_by: [
internal_transaction.transaction_hash,
internal_transaction.index
], ],
# order so that row ShareLocks are grabbed in a consistent order with # NOTE: find a better way to know the alias that ecto gives to internal_transaction
# `Explorer.Chain.Import.Runner.Transactions.insert` lock: "FOR UPDATE OF i0"
order_by: transaction.hash
) )
{select_sql, parameters} = SQL.to_sql(:all, repo, query) {:ok, repo.all(query)}
insert_sql = """
INSERT INTO transaction_forks (uncle_hash, index, hash, inserted_at, updated_at)
#{select_sql}
ON CONFLICT (uncle_hash, index)
DO UPDATE SET hash = EXCLUDED.hash
WHERE EXCLUDED.hash <> transaction_forks.hash
RETURNING uncle_hash, hash
"""
with {:ok, %Postgrex.Result{columns: ["uncle_hash", "hash"], command: :insert, rows: rows}} <-
SQL.query(
repo,
insert_sql,
parameters,
timeout: timeout
) do
derived_transaction_forks = Enum.map(rows, fn [uncle_hash, hash] -> %{uncle_hash: uncle_hash, hash: hash} end)
{:ok, derived_transaction_forks}
end
end end
defp fork_transactions(%{ defp fork_transactions(%{
repo: repo, repo: repo,
timeout: timeout, timeout: timeout,
timestamps: %{updated_at: updated_at}, timestamps: %{updated_at: updated_at},
where_forked: where_forked blocks_changes: blocks_changes
}) do }) do
query = query =
where_forked from(
|> update( transaction in where_forked(blocks_changes),
set: [ select: %{
block_hash: nil, block_hash: transaction.block_hash,
block_number: nil, index: transaction.index,
gas_used: nil, hash: transaction.hash
cumulative_gas_used: nil, },
index: nil, # Enforce Transaction ShareLocks order (see docs: sharelocks.md)
internal_transactions_indexed_at: nil, order_by: [asc: :hash],
status: nil, lock: "FOR UPDATE"
error: nil,
updated_at: ^updated_at
]
) )
|> select([:hash])
try do update_query =
{_, result} = repo.update_all(query, [], timeout: timeout) from(
t in Transaction,
join: s in subquery(query),
on: t.hash == s.hash,
update: [
set: [
block_hash: nil,
block_number: nil,
gas_used: nil,
cumulative_gas_used: nil,
index: nil,
internal_transactions_indexed_at: nil,
status: nil,
error: nil,
updated_at: ^updated_at
]
],
select: %{
block_hash: s.block_hash,
index: s.index,
hash: s.hash
}
)
{:ok, result} {_num, transactions} = repo.update_all(update_query, [], timeout: timeout)
rescue
postgrex_error in Postgrex.Error -> {:ok, transactions}
{:error, %{exception: postgrex_error}} rescue
end postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error}}
end
defp derive_transaction_forks(%{
repo: repo,
timeout: timeout,
timestamps: %{inserted_at: inserted_at, updated_at: updated_at},
transactions: transactions
}) do
transaction_forks =
transactions
|> Enum.map(fn transaction ->
%{
uncle_hash: transaction.block_hash,
index: transaction.index,
hash: transaction.hash,
inserted_at: inserted_at,
updated_at: updated_at
}
end)
# Enforce Fork ShareLocks order (see docs: sharelocks.md)
|> Enum.sort_by(&{&1.uncle_hash, &1.index})
{_total, forked_transaction} =
repo.insert_all(
Transaction.Fork,
transaction_forks,
conflict_target: [:uncle_hash, :index],
on_conflict:
from(
transaction_fork in Transaction.Fork,
update: [set: [hash: fragment("EXCLUDED.hash")]],
where: fragment("EXCLUDED.hash <> ?", transaction_fork.hash)
),
returning: [:hash],
timeout: timeout
)
{:ok, Enum.map(forked_transaction, & &1.hash)}
end end
@spec insert(Repo.t(), [map()], %{ @spec insert(Repo.t(), [map()], %{
@ -228,8 +257,8 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Block ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.number, &1.hash}) ordered_changes_list = Enum.sort_by(changes_list, & &1.hash)
Import.insert_changes_list( Import.insert_changes_list(
repo, repo,
@ -277,85 +306,135 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
) )
end end
defp ordered_consensus_block_numbers(blocks_changes) when is_list(blocks_changes) do defp consensus_block_numbers(blocks_changes) when is_list(blocks_changes) do
blocks_changes blocks_changes
|> Enum.reduce(MapSet.new(), fn |> Enum.filter(& &1.consensus)
%{consensus: true, number: number}, acc -> |> Enum.map(& &1.number)
MapSet.put(acc, number)
%{consensus: false}, acc ->
acc
end)
|> Enum.sort()
end end
defp lose_consensus(_, [], _), do: {:ok, []} defp lose_consensus(repo, hashes, consensus_block_numbers, changes_list, %{
timeout: timeout,
timestamps: %{updated_at: updated_at}
}) do
acquire_query =
from(
block in where_invalid_neighbour(changes_list),
or_where: block.number in ^consensus_block_numbers,
# we also need to acquire blocks that will be upserted here, for ordering
or_where: block.hash in ^hashes,
select: block.hash,
# Enforce Block ShareLocks order (see docs: sharelocks.md)
order_by: [asc: block.hash],
lock: "FOR UPDATE"
)
defp lose_consensus(repo, ordered_consensus_block_number, %{timeout: timeout, timestamps: %{updated_at: updated_at}}) {_, removed_consensus_block_hashes} =
when is_list(ordered_consensus_block_number) do repo.update_all(
query = from(
block in Block,
join: s in subquery(acquire_query),
on: block.hash == s.hash,
# we don't want to remove consensus from blocks that will be upserted
where: block.hash not in ^hashes,
select: block.hash
),
[set: [consensus: false, updated_at: updated_at]],
timeout: timeout
)
{:ok, removed_consensus_block_hashes}
rescue
postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, consensus_block_numbers: consensus_block_numbers}}
end
defp remove_nonconsensus_token_transfers(repo, forked_transaction_hashes, %{timeout: timeout}) do
ordered_token_transfers =
from( from(
block in Block, token_transfer in TokenTransfer,
where: block.number in ^ordered_consensus_block_number, where: token_transfer.transaction_hash in ^forked_transaction_hashes,
update: [ select: token_transfer.transaction_hash,
set: [ # Enforce TokenTransfer ShareLocks order (see docs: sharelocks.md)
consensus: false, order_by: [
updated_at: ^updated_at token_transfer.transaction_hash,
] token_transfer.log_index
], ],
select: [:hash, :number] lock: "FOR UPDATE"
) )
try do query =
{_, result} = repo.update_all(query, [], timeout: timeout) from(token_transfer in TokenTransfer,
select: map(token_transfer, [:transaction_hash, :log_index]),
inner_join: ordered_token_transfer in subquery(ordered_token_transfers),
on: ordered_token_transfer.transaction_hash == token_transfer.transaction_hash
)
{:ok, result} {_count, deleted_token_transfers} = repo.delete_all(query, timeout: timeout)
rescue
postgrex_error in Postgrex.Error -> {:ok, deleted_token_transfers}
{:error, %{exception: postgrex_error, consensus_block_numbers: ordered_consensus_block_number}} rescue
end postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, transactions: forked_transaction_hashes}}
end end
defp lose_invalid_neighbour_consensus(repo, where_invalid_neighbour, %{ defp remove_nonconsensus_internal_transactions(repo, forked_transaction_hashes, %{timeout: timeout}) do
timeout: timeout,
timestamps: %{updated_at: updated_at}
}) do
query = query =
from(internal_transaction in InternalTransaction,
where: internal_transaction.transaction_hash in ^forked_transaction_hashes,
select: map(internal_transaction, [:transaction_hash, :index])
)
# ShareLocks order already enforced by `acquire_internal_transactions` (see docs: sharelocks.md)
{_count, deleted_internal_transactions} = repo.delete_all(query, timeout: timeout)
{:ok, deleted_internal_transactions}
rescue
postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, transactions: forked_transaction_hashes}}
end
defp remove_nonconsensus_logs(repo, forked_transaction_hashes, %{timeout: timeout}) do
ordered_logs =
from( from(
block in where_invalid_neighbour, log in Log,
update: [ where: log.transaction_hash in ^forked_transaction_hashes,
set: [ select: log.transaction_hash,
consensus: false, # Enforce Log ShareLocks order (see docs: sharelocks.md)
updated_at: ^updated_at order_by: [
] log.transaction_hash,
log.index
], ],
select: [:hash, :number] lock: "FOR UPDATE"
) )
try do query =
{_, result} = repo.update_all(query, [], timeout: timeout) from(log in Log,
select: map(log, [:transaction_hash, :index]),
inner_join: ordered_log in subquery(ordered_logs),
on: ordered_log.transaction_hash == log.transaction_hash
)
{:ok, result} {_count, deleted_logs} = repo.delete_all(query, timeout: timeout)
rescue
postgrex_error in Postgrex.Error -> {:ok, deleted_logs}
{:error, %{exception: postgrex_error, where_invalid_neighbour: where_invalid_neighbour}} rescue
end postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, transactions: forked_transaction_hashes}}
end end
defp delete_address_token_balances(_, [], _), do: {:ok, []} defp delete_address_token_balances(_, [], _), do: {:ok, []}
defp delete_address_token_balances(repo, ordered_consensus_block_numbers, %{timeout: timeout}) do defp delete_address_token_balances(repo, consensus_block_numbers, %{timeout: timeout}) do
ordered_query = ordered_query =
from(address_token_balance in Address.TokenBalance, from(address_token_balance in Address.TokenBalance,
where: address_token_balance.block_number in ^ordered_consensus_block_numbers, where: address_token_balance.block_number in ^consensus_block_numbers,
select: map(address_token_balance, [:address_hash, :token_contract_address_hash, :block_number]), select: map(address_token_balance, [:address_hash, :token_contract_address_hash, :block_number]),
# MUST match order in `Explorer.Chain.Import.Runner.Address.TokenBalances.insert` to prevent ShareLock ordering deadlocks. # Enforce TokenBalance ShareLocks order (see docs: sharelocks.md)
order_by: [ order_by: [
address_token_balance.address_hash, address_token_balance.address_hash,
address_token_balance.token_contract_address_hash, address_token_balance.token_contract_address_hash,
address_token_balance.block_number address_token_balance.block_number
], ],
# ensures rows remains locked while outer query is joining to it
lock: "FOR UPDATE" lock: "FOR UPDATE"
) )
@ -376,23 +455,22 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
{:ok, deleted_address_token_balances} {:ok, deleted_address_token_balances}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, block_numbers: ordered_consensus_block_numbers}} {:error, %{exception: postgrex_error, block_numbers: consensus_block_numbers}}
end end
end end
defp delete_address_current_token_balances(_, [], _), do: {:ok, []} defp delete_address_current_token_balances(_, [], _), do: {:ok, []}
defp delete_address_current_token_balances(repo, ordered_consensus_block_numbers, %{timeout: timeout}) do defp delete_address_current_token_balances(repo, consensus_block_numbers, %{timeout: timeout}) do
ordered_query = ordered_query =
from(address_current_token_balance in Address.CurrentTokenBalance, from(address_current_token_balance in Address.CurrentTokenBalance,
where: address_current_token_balance.block_number in ^ordered_consensus_block_numbers, where: address_current_token_balance.block_number in ^consensus_block_numbers,
select: map(address_current_token_balance, [:address_hash, :token_contract_address_hash]), select: map(address_current_token_balance, [:address_hash, :token_contract_address_hash]),
# MUST match order in `Explorer.Chain.Import.Runner.Address.CurrentTokenBalances.insert` to prevent ShareLock ordering deadlocks. # Enforce CurrentTokenBalance ShareLocks order (see docs: sharelocks.md)
order_by: [ order_by: [
address_current_token_balance.address_hash, address_current_token_balance.address_hash,
address_current_token_balance.token_contract_address_hash address_current_token_balance.token_contract_address_hash
], ],
# ensures row remains locked while outer query is joining to it
lock: "FOR UPDATE" lock: "FOR UPDATE"
) )
@ -420,13 +498,12 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
{:ok, deleted_address_current_token_balances} {:ok, deleted_address_current_token_balances}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, block_numbers: ordered_consensus_block_numbers}} {:error, %{exception: postgrex_error, block_numbers: consensus_block_numbers}}
end end
end end
defp derive_address_current_token_balances(_, [], _), do: {:ok, []} defp derive_address_current_token_balances(_, [], _), do: {:ok, []}
# sobelow_skip ["SQL.Query"]
defp derive_address_current_token_balances(repo, deleted_address_current_token_balances, %{timeout: timeout}) defp derive_address_current_token_balances(repo, deleted_address_current_token_balances, %{timeout: timeout})
when is_list(deleted_address_current_token_balances) do when is_list(deleted_address_current_token_balances) do
initial_query = initial_query =
@ -460,57 +537,39 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
address_token_balance.address_hash == new_current_token_balance.address_hash and address_token_balance.address_hash == new_current_token_balance.address_hash and
address_token_balance.token_contract_address_hash == new_current_token_balance.token_contract_address_hash and address_token_balance.token_contract_address_hash == new_current_token_balance.token_contract_address_hash and
address_token_balance.block_number == new_current_token_balance.block_number, address_token_balance.block_number == new_current_token_balance.block_number,
select: { select: %{
new_current_token_balance.address_hash, address_hash: new_current_token_balance.address_hash,
new_current_token_balance.token_contract_address_hash, token_contract_address_hash: new_current_token_balance.token_contract_address_hash,
new_current_token_balance.block_number, block_number: new_current_token_balance.block_number,
address_token_balance.value, value: address_token_balance.value,
over(min(address_token_balance.inserted_at), :w), inserted_at: over(min(address_token_balance.inserted_at), :w),
over(max(address_token_balance.updated_at), :w) updated_at: over(max(address_token_balance.updated_at), :w)
}, },
# Prevent ShareLock deadlock by matching order of `Explorer.Chain.Import.Runner.Address.CurrentTokenBalances.insert`
order_by: [new_current_token_balance.address_hash, new_current_token_balance.token_contract_address_hash],
windows: [ windows: [
w: [partition_by: [address_token_balance.address_hash, address_token_balance.token_contract_address_hash]] w: [partition_by: [address_token_balance.address_hash, address_token_balance.token_contract_address_hash]]
] ]
) )
{select_sql, parameters} = SQL.to_sql(:all, repo, new_current_token_balance_query) ordered_current_token_balance =
new_current_token_balance_query
# No `ON CONFLICT` because `delete_address_current_token_balances` should have removed any conflicts. |> repo.all()
insert_sql = """ # Enforce CurrentTokenBalance ShareLocks order (see docs: sharelocks.md)
INSERT INTO address_current_token_balances (address_hash, token_contract_address_hash, block_number, value, inserted_at, updated_at) |> Enum.sort_by(&{&1.address_hash, &1.token_contract_address_hash})
#{select_sql}
RETURNING address_hash, token_contract_address_hash, block_number, value {_total, result} =
""" repo.insert_all(
Address.CurrentTokenBalance,
with {:ok, ordered_current_token_balance,
%Postgrex.Result{ # No `ON CONFLICT` because `delete_address_current_token_balances`
columns: [ # should have removed any conflicts.
"address_hash", returning: [:address_hash, :token_contract_address_hash, :block_number, :value],
"token_contract_address_hash", timeout: timeout
"block_number", )
# needed for `update_tokens_holder_count`
"value" derived_address_current_token_balances =
], Enum.map(result, &Map.take(&1, [:address_hash, :token_contract_address_hash, :block_number, :value]))
command: :insert,
rows: rows {:ok, derived_address_current_token_balances}
}} <- SQL.query(repo, insert_sql, parameters, timeout: timeout) do
derived_address_current_token_balances =
Enum.map(rows, fn [address_hash_bytes, token_contract_address_hash_bytes, block_number, value] ->
{:ok, address_hash} = Hash.Address.load(address_hash_bytes)
{:ok, token_contract_address_hash} = Hash.Address.load(token_contract_address_hash_bytes)
%{
address_hash: address_hash,
token_contract_address_hash: token_contract_address_hash,
block_number: block_number,
value: value
}
end)
{:ok, derived_address_current_token_balances}
end
end end
# `block_rewards` are linked to `blocks.hash`, but fetched by `blocks.number`, so when a block with the same number is # `block_rewards` are linked to `blocks.hash`, but fetched by `blocks.number`, so when a block with the same number is
@ -528,11 +587,24 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
query = query =
from(reward in Reward, from(reward in Reward,
inner_join: block in assoc(reward, :block), inner_join: block in assoc(reward, :block),
where: block.hash in ^hashes or block.number in ^numbers where: block.hash in ^hashes or block.number in ^numbers,
# Enforce Reward ShareLocks order (see docs: sharelocks.md)
order_by: [asc: :address_hash, asc: :address_type, asc: :block_hash],
# NOTE: find a better way to know the alias that ecto gives to reward
lock: "FOR UPDATE OF b0"
)
delete_query =
from(r in Reward,
join: s in subquery(query),
on:
r.address_hash == s.address_hash and
r.address_type == s.address_type and
r.block_hash == s.block_hash
) )
try do try do
{count, nil} = repo.delete_all(query, timeout: timeout) {count, nil} = repo.delete_all(delete_query, timeout: timeout)
{:ok, count} {:ok, count}
rescue rescue
@ -541,34 +613,56 @@ defmodule Explorer.Chain.Import.Runner.Blocks do
end end
end end
defp update_block_second_degree_relations(repo, blocks, %{timeout: timeout, timestamps: %{updated_at: updated_at}}) defp update_block_second_degree_relations(repo, uncle_hashes, %{
when is_list(blocks) do timeout: timeout,
ordered_uncle_hashes = timestamps: %{updated_at: updated_at}
blocks })
|> MapSet.new(& &1.hash) when is_list(uncle_hashes) do
|> Enum.sort()
query = query =
from( from(
bsdr in Block.SecondDegreeRelation, bsdr in Block.SecondDegreeRelation,
where: bsdr.uncle_hash in ^ordered_uncle_hashes, where: bsdr.uncle_hash in ^uncle_hashes,
update: [ # Enforce SecondDegreeRelation ShareLocks order (see docs: sharelocks.md)
set: [ order_by: [asc: :nephew_hash, asc: :uncle_hash],
uncle_fetched_at: ^updated_at lock: "FOR UPDATE"
] )
]
update_query =
from(
b in Block.SecondDegreeRelation,
join: s in subquery(query),
on: b.nephew_hash == s.nephew_hash and b.uncle_hash == s.uncle_hash,
update: [set: [uncle_fetched_at: ^updated_at]]
) )
try do try do
{_, result} = repo.update_all(query, [], timeout: timeout) {_, result} = repo.update_all(update_query, [], timeout: timeout)
{:ok, result} {:ok, result}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, uncle_hashes: ordered_uncle_hashes}} {:error, %{exception: postgrex_error, uncle_hashes: uncle_hashes}}
end end
end end
defp update_internal_transaction_block_number(repo, blocks_hashes) when is_list(blocks_hashes) do
query =
from(
internal_transaction in InternalTransaction,
join: transaction in Transaction,
on: internal_transaction.transaction_hash == transaction.hash,
join: block in Block,
on: block.hash == transaction.block_hash,
where: block.hash in ^blocks_hashes,
update: [set: [block_number: block.number]]
)
# ShareLocks order already enforced by `acquire_internal_transactions` (see docs: sharelocks.md)
{total, _} = repo.update_all(query, [])
{:ok, total}
end
defp where_forked(blocks_changes) when is_list(blocks_changes) do defp where_forked(blocks_changes) when is_list(blocks_changes) do
initial = from(t in Transaction, where: false) initial = from(t in Transaction, where: false)
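The hunks above, and most of the runner changes below, converge on one ShareLocks discipline: instead of issuing an `UPDATE` or `DELETE` with an unordered `WHERE ... IN`, first select the target rows with a deterministic `order_by` plus `lock: "FOR UPDATE"`, then mutate by joining on that locked subquery, so concurrent imports always take row locks in the same order. A minimal sketch of the pattern; `MySchema`, the field names and the timeout are placeholders rather than code from this changeset:

    import Ecto.Query

    def lock_then_update(repo, hashes, updated_at, timeout) do
      # Acquire row locks in a consistent order (see docs: sharelocks.md).
      acquire_query =
        from(
          row in MySchema,
          where: row.hash in ^hashes,
          order_by: [asc: row.hash],
          lock: "FOR UPDATE"
        )

      # Mutate only the rows that were just locked.
      update_query =
        from(
          row in MySchema,
          join: locked in subquery(acquire_query),
          on: row.hash == locked.hash,
          update: [set: [updated_at: ^updated_at]]
        )

      {count, _} = repo.update_all(update_query, [], timeout: timeout)
      {:ok, count}
    end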

@ -47,12 +47,16 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do
update_transactions_options = %{timeout: transactions_timeout, timestamps: timestamps} update_transactions_options = %{timeout: transactions_timeout, timestamps: timestamps}
# Enforce ShareLocks tables order (see docs: sharelocks.md)
multi multi
|> Multi.run(:acquire_transactions, fn repo, _ ->
acquire_transactions(repo, changes_list)
end)
|> Multi.run(:internal_transactions, fn repo, _ -> |> Multi.run(:internal_transactions, fn repo, _ ->
insert(repo, changes_list, insert_options) insert(repo, changes_list, insert_options)
end) end)
|> Multi.run(:internal_transactions_indexed_at_transactions, fn repo, _ -> |> Multi.run(:internal_transactions_indexed_at_transactions, fn repo, %{acquire_transactions: transaction_hashes} ->
update_transactions(repo, changes_list, update_transactions_options) update_transactions(repo, transaction_hashes, update_transactions_options)
end) end)
end end
@ -70,7 +74,7 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do
when is_list(changes_list) do when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce InternalTransaction ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.index}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.index})
final_changes_list = reject_pending_transactions(ordered_changes_list, repo) final_changes_list = reject_pending_transactions(ordered_changes_list, repo)
@ -144,21 +148,38 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do
) )
end end
defp update_transactions(repo, internal_transactions, %{ defp acquire_transactions(repo, internal_transactions) do
timeout: timeout, transaction_hashes =
timestamps: timestamps
})
when is_list(internal_transactions) do
ordered_transaction_hashes =
internal_transactions internal_transactions
|> MapSet.new(& &1.transaction_hash) |> MapSet.new(& &1.transaction_hash)
|> Enum.sort() |> MapSet.to_list()
query = query =
from( from(
t in Transaction, t in Transaction,
where: t.hash in ^ordered_transaction_hashes, where: t.hash in ^transaction_hashes,
where: not is_nil(t.block_hash), where: not is_nil(t.block_hash),
select: t.hash,
# Enforce Transaction ShareLocks order (see docs: sharelocks.md)
order_by: t.hash,
lock: "FOR UPDATE"
)
hashes = repo.all(query)
{:ok, hashes}
end
defp update_transactions(repo, transaction_hashes, %{
timeout: timeout,
timestamps: timestamps
})
when is_list(transaction_hashes) do
update_query =
from(
t in Transaction,
where: t.hash in ^transaction_hashes,
# ShareLocks order already enforced by `acquire_transactions` (see docs: sharelocks.md)
update: [ update: [
set: [ set: [
internal_transactions_indexed_at: ^timestamps.updated_at, internal_transactions_indexed_at: ^timestamps.updated_at,
@ -184,12 +205,12 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactions do
) )
try do try do
{_transaction_count, result} = repo.update_all(query, [], timeout: timeout) {_transaction_count, result} = repo.update_all(update_query, [], timeout: timeout)
{:ok, result} {:ok, result}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, transaction_hashes: ordered_transaction_hashes}} {:error, %{exception: postgrex_error, transaction_hashes: transaction_hashes}}
end end
end end
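Note how the new `acquire_transactions` step threads its result forward: whatever a `Multi.run/3` step returns under its name becomes available in the changes map of every later step, which is how `update_transactions/3` receives the already-locked hashes instead of recomputing them. A stripped-down, runnable illustration of that idiom with placeholder values:

    alias Ecto.Multi

    multi =
      Multi.new()
      |> Multi.run(:acquire, fn _repo, _changes_so_far ->
        # In the runner this returns the transaction hashes locked FOR UPDATE.
        {:ok, [:hash_a, :hash_b]}
      end)
      |> Multi.run(:update, fn _repo, %{acquire: hashes} ->
        # Later steps read earlier results out of the accumulated changes map.
        {:ok, length(hashes)}
      end)

    # Repo.transaction(multi) then yields {:ok, %{acquire: [:hash_a, :hash_b], update: 2}}.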

@ -49,36 +49,36 @@ defmodule Explorer.Chain.Import.Runner.InternalTransactionsIndexedAtBlocks do
defp update_blocks(_repo, [], %{}), do: {:ok, []} defp update_blocks(_repo, [], %{}), do: {:ok, []}
defp update_blocks(repo, block_numbers, %{ defp update_blocks(repo, changes_list, %{
timeout: timeout, timeout: timeout,
timestamps: timestamps timestamps: timestamps
}) })
when is_list(block_numbers) do when is_list(changes_list) do
ordered_block_numbers = block_numbers = Enum.map(changes_list, fn %{number: number} -> number end)
block_numbers
|> Enum.map(fn %{number: number} -> number end)
|> Enum.sort()
query = query =
from( from(
b in Block, b in Block,
where: b.number in ^ordered_block_numbers and b.consensus, where: b.number in ^block_numbers and b.consensus,
update: [ # Enforce Block ShareLocks order (see docs: sharelocks.md)
set: [ order_by: [asc: b.hash],
internal_transactions_indexed_at: ^timestamps.updated_at lock: "FOR UPDATE"
]
]
) )
block_count = Enum.count(ordered_block_numbers) block_count = Enum.count(block_numbers)
try do try do
{^block_count, result} = repo.update_all(query, [], timeout: timeout) {^block_count, result} =
repo.update_all(
from(b in Block, join: s in subquery(query), on: b.hash == s.hash),
[set: [internal_transactions_indexed_at: timestamps.updated_at]],
timeout: timeout
)
{:ok, result} {:ok, result}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, block_numbers: ordered_block_numbers}} {:error, %{exception: postgrex_error, block_numbers: block_numbers}}
end end
end end
end end

@ -58,7 +58,7 @@ defmodule Explorer.Chain.Import.Runner.Logs do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Log ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.index}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.index})
{:ok, _} = {:ok, _} =

@ -40,7 +40,11 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
|> Map.put_new(:timeout, @timeout) |> Map.put_new(:timeout, @timeout)
|> Map.put(:timestamps, timestamps) |> Map.put(:timestamps, timestamps)
# Enforce ShareLocks tables order (see docs: sharelocks.md)
multi multi
|> Multi.run(:acquire_all_staking_pools, fn repo, _ ->
acquire_all_staking_pools(repo)
end)
|> Multi.run(:mark_as_deleted, fn repo, _ -> |> Multi.run(:mark_as_deleted, fn repo, _ ->
mark_as_deleted(repo, changes_list, insert_options) mark_as_deleted(repo, changes_list, insert_options)
end) end)
@ -55,6 +59,20 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
@impl Import.Runner @impl Import.Runner
def timeout, do: @timeout def timeout, do: @timeout
defp acquire_all_staking_pools(repo) do
query =
from(
pool in StakingPool,
# Enforce StakingPool ShareLocks order (see docs: sharelocks.md)
order_by: pool.staking_address_hash,
lock: "FOR UPDATE"
)
pools = repo.all(query)
{:ok, pools}
end
defp mark_as_deleted(repo, changes_list, %{timeout: timeout}) when is_list(changes_list) do defp mark_as_deleted(repo, changes_list, %{timeout: timeout}) when is_list(changes_list) do
addresses = Enum.map(changes_list, & &1.staking_address_hash) addresses = Enum.map(changes_list, & &1.staking_address_hash)
@ -62,12 +80,8 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
from( from(
pool in StakingPool, pool in StakingPool,
where: pool.staking_address_hash not in ^addresses, where: pool.staking_address_hash not in ^addresses,
update: [ # ShareLocks order already enforced by `acquire_all_staking_pools` (see docs: sharelocks.md)
set: [ update: [set: [is_deleted: true, is_active: false]]
is_deleted: true,
is_active: false
]
]
) )
try do try do
@ -90,10 +104,13 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# Enforce StakingPool ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, & &1.staking_address_hash)
{:ok, _} = {:ok, _} =
Import.insert_changes_list( Import.insert_changes_list(
repo, repo,
changes_list, ordered_changes_list,
conflict_target: :staking_address_hash, conflict_target: :staking_address_hash,
on_conflict: on_conflict, on_conflict: on_conflict,
for: StakingPool, for: StakingPool,
@ -138,10 +155,11 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
total = repo.one!(total_query) total = repo.one!(total_query)
if total > Decimal.new(0) do if total > Decimal.new(0) do
query = update_query =
from( from(
p in StakingPool, p in StakingPool,
where: p.is_active == true, where: p.is_active == true,
# ShareLocks order already enforced by `acquire_all_staking_pools` (see docs: sharelocks.md)
update: [ update: [
set: [ set: [
staked_ratio: p.staked_amount / ^total * 100, staked_ratio: p.staked_amount / ^total * 100,
@ -150,7 +168,8 @@ defmodule Explorer.Chain.Import.Runner.StakingPools do
] ]
) )
{count, _} = repo.update_all(query, [], timeout: timeout) {count, _} = repo.update_all(update_query, [], timeout: timeout)
{:ok, count} {:ok, count}
else else
{:ok, 1} {:ok, 1}

@ -59,10 +59,13 @@ defmodule Explorer.Chain.Import.Runner.StakingPoolsDelegators do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# Enforce StakingPoolsDelegator ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.delegator_address_hash, &1.pool_address_hash})
{:ok, _} = {:ok, _} =
Import.insert_changes_list( Import.insert_changes_list(
repo, repo,
changes_list, ordered_changes_list,
conflict_target: [:pool_address_hash, :delegator_address_hash], conflict_target: [:pool_address_hash, :delegator_address_hash],
on_conflict: on_conflict, on_conflict: on_conflict,
for: StakingPoolsDelegator, for: StakingPoolsDelegator,

@ -54,7 +54,7 @@ defmodule Explorer.Chain.Import.Runner.TokenTransfers do
def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce TokenTransfer ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.log_index}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.log_index})
{:ok, _} = {:ok, _} =

@ -7,7 +7,6 @@ defmodule Explorer.Chain.Import.Runner.Tokens do
import Ecto.Query, only: [from: 2] import Ecto.Query, only: [from: 2]
alias Ecto.Adapters.SQL
alias Ecto.{Multi, Repo} alias Ecto.{Multi, Repo}
alias Explorer.Chain.{Hash, Import, Token} alias Explorer.Chain.{Hash, Import, Token}
@ -22,10 +21,61 @@ defmodule Explorer.Chain.Import.Runner.Tokens do
@type holder_count :: non_neg_integer() @type holder_count :: non_neg_integer()
@type token_holder_count :: %{contract_address_hash: Hash.Address.t(), count: holder_count()} @type token_holder_count :: %{contract_address_hash: Hash.Address.t(), count: holder_count()}
def update_holder_counts_with_deltas(repo, token_holder_count_deltas, options) do def acquire_contract_address_tokens(repo, contract_address_hashes) do
parameters = token_holder_count_deltas_to_parameters(token_holder_count_deltas) token_query =
from(
token in Token,
where: token.contract_address_hash in ^contract_address_hashes,
# Enforce Token ShareLocks order (see docs: sharelocks.md)
order_by: token.contract_address_hash,
lock: "FOR UPDATE"
)
tokens = repo.all(token_query)
{:ok, tokens}
end
def update_holder_counts_with_deltas(repo, token_holder_count_deltas, %{
timeout: timeout,
timestamps: %{updated_at: updated_at}
}) do
# NOTE that acquire_contract_address_tokens needs to be called before this
{hashes, deltas} =
token_holder_count_deltas
|> Enum.map(fn %{contract_address_hash: contract_address_hash, delta: delta} ->
{:ok, contract_address_hash_bytes} = Hash.Address.dump(contract_address_hash)
{contract_address_hash_bytes, delta}
end)
|> Enum.unzip()
query =
from(
token in Token,
join:
deltas in fragment(
"(SELECT unnest(?::bytea[]) as contract_address_hash, unnest(?::bigint[]) as delta)",
^hashes,
^deltas
),
on: token.contract_address_hash == deltas.contract_address_hash,
where: not is_nil(token.holder_count),
# ShareLocks order already enforced by `acquire_contract_address_tokens` (see docs: sharelocks.md)
update: [
set: [
holder_count: token.holder_count + deltas.delta,
updated_at: ^updated_at
]
],
select: %{
contract_address_hash: token.contract_address_hash,
holder_count: token.holder_count
}
)
{_total, result} = repo.update_all(query, [], timeout: timeout)
update_holder_counts_with_parameters(repo, parameters, options) {:ok, result}
end end
@impl Import.Runner @impl Import.Runner
@ -71,7 +121,7 @@ defmodule Explorer.Chain.Import.Runner.Tokens do
changes_list changes_list
# brand new tokens start with no holders # brand new tokens start with no holders
|> Stream.map(&Map.put_new(&1, :holder_count, 0)) |> Stream.map(&Map.put_new(&1, :holder_count, 0))
# order so that row ShareLocks are grabbed in a consistent order # Enforce Token ShareLocks order (see docs: sharelocks.md)
|> Enum.sort_by(& &1.contract_address_hash) |> Enum.sort_by(& &1.contract_address_hash)
{:ok, _} = {:ok, _} =
@ -117,69 +167,4 @@ defmodule Explorer.Chain.Import.Runner.Tokens do
) )
) )
end end
defp token_holder_count_deltas_to_parameters(token_holder_count_deltas) when is_list(token_holder_count_deltas) do
Enum.flat_map(token_holder_count_deltas, fn
%{contract_address_hash: contract_address_hash, delta: delta} ->
{:ok, contract_address_hash_bytes} = Hash.Address.dump(contract_address_hash)
[contract_address_hash_bytes, delta]
end)
end
defp update_holder_counts_with_parameters(_, [], _), do: {:ok, []}
# sobelow_skip ["SQL.Query"]
defp update_holder_counts_with_parameters(repo, parameters, %{timeout: timeout, timestamps: %{updated_at: updated_at}})
when is_list(parameters) do
update_sql = update_holder_counts_sql(parameters)
with {:ok, %Postgrex.Result{columns: ["contract_address_hash", "holder_count"], command: :update, rows: rows}} <-
SQL.query(repo, update_sql, [updated_at | parameters], timeout: timeout) do
update_token_holder_counts =
Enum.map(rows, fn [contract_address_hash_bytes, holder_count] ->
{:ok, contract_address_hash} = Hash.Address.cast(contract_address_hash_bytes)
%{contract_address_hash: contract_address_hash, holder_count: holder_count}
end)
{:ok, update_token_holder_counts}
end
end
defp update_holder_counts_sql(parameters) when is_list(parameters) do
parameters
|> Enum.count()
|> div(2)
|> update_holder_counts_sql()
end
defp update_holder_counts_sql(row_count) when is_integer(row_count) do
parameters_sql =
update_holder_counts_parameters_sql(
row_count,
# skip $1 as it is used for the common `updated_at` timestamp
2
)
"""
UPDATE tokens
SET holder_count = holder_count + holder_counts.delta,
updated_at = $1
FROM (
VALUES
#{parameters_sql}
) AS holder_counts(contract_address_hash, delta)
WHERE tokens.contract_address_hash = holder_counts.contract_address_hash AND
holder_count IS NOT NULL
RETURNING tokens.contract_address_hash, tokens.holder_count
"""
end
defp update_holder_counts_parameters_sql(row_count, start) when is_integer(row_count) do
Enum.map_join(0..(row_count - 1), ",\n ", fn i ->
contract_address_hash_parameter_number = 2 * i + start
holder_count_number = contract_address_hash_parameter_number + 1
"($#{contract_address_hash_parameter_number}::bytea, $#{holder_count_number}::bigint)"
end)
end
end end
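The rewritten `update_holder_counts_with_deltas/3` above replaces the hand-built SQL with a single `update_all` that joins the table against two parallel arrays via `unnest`. The same shape works for any bulk "apply a per-row delta" update; a sketch under assumed names (a `Counter` schema with a binary `key` and an integer `count` column), not code from this changeset:

    import Ecto.Query

    def apply_deltas(repo, deltas, updated_at) do
      # Split the deltas into two parallel arrays for the unnest fragment.
      {keys, values} =
        deltas
        |> Enum.map(fn %{key: key, delta: delta} -> {key, delta} end)
        |> Enum.unzip()

      query =
        from(
          c in Counter,
          join:
            d in fragment(
              "(SELECT unnest(?::bytea[]) AS key, unnest(?::bigint[]) AS delta)",
              ^keys,
              ^values
            ),
          on: c.key == d.key,
          update: [set: [count: c.count + d.delta, updated_at: ^updated_at]],
          select: %{key: c.key, count: c.count}
        )

      {_total, result} = repo.update_all(query, [])
      {:ok, result}
    end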

@ -58,8 +58,8 @@ defmodule Explorer.Chain.Import.Runner.Transaction.Forks do
defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Fork ShareLocks order (see docs: sharelocks.md)
ordered_changes_list = Enum.sort_by(changes_list, &{&1.uncle_hash, &1.hash}) ordered_changes_list = Enum.sort_by(changes_list, &{&1.uncle_hash, &1.index})
Import.insert_changes_list( Import.insert_changes_list(
repo, repo,

@ -42,13 +42,14 @@ defmodule Explorer.Chain.Import.Runner.Transactions do
|> Map.put(:timestamps, timestamps) |> Map.put(:timestamps, timestamps)
|> Map.put(:token_transfer_transaction_hash_set, token_transfer_transaction_hash_set(options)) |> Map.put(:token_transfer_transaction_hash_set, token_transfer_transaction_hash_set(options))
# Enforce ShareLocks tables order (see docs: sharelocks.md)
multi multi
|> Multi.run(:recollated_transactions, fn repo, _ ->
discard_blocks_for_recollated_transactions(repo, changes_list, insert_options)
end)
|> Multi.run(:transactions, fn repo, _ -> |> Multi.run(:transactions, fn repo, _ ->
insert(repo, changes_list, insert_options) insert(repo, changes_list, insert_options)
end) end)
|> Multi.run(:recollated_transactions, fn repo, %{transactions: transactions} ->
discard_blocks_for_recollated_transactions(repo, transactions, insert_options)
end)
end end
@impl Import.Runner @impl Import.Runner
@ -81,7 +82,7 @@ defmodule Explorer.Chain.Import.Runner.Transactions do
ordered_changes_list = ordered_changes_list =
changes_list changes_list
|> put_internal_transactions_indexed_at(inserted_at, token_transfer_transaction_hash_set) |> put_internal_transactions_indexed_at(inserted_at, token_transfer_transaction_hash_set)
# order so that row ShareLocks are grabbed in a consistent order # Enforce Transaction ShareLocks order (see docs: sharelocks.md)
|> Enum.sort_by(& &1.hash) |> Enum.sort_by(& &1.hash)
Import.insert_changes_list( Import.insert_changes_list(
@ -186,41 +187,64 @@ defmodule Explorer.Chain.Import.Runner.Transactions do
defp put_internal_transactions_indexed_at?(_, _), do: false defp put_internal_transactions_indexed_at?(_, _), do: false
defp discard_blocks_for_recollated_transactions(repo, transactions, %{ defp discard_blocks_for_recollated_transactions(repo, changes_list, %{
timeout: timeout, timeout: timeout,
timestamps: %{updated_at: updated_at} timestamps: %{updated_at: updated_at}
}) })
when is_list(transactions) do when is_list(changes_list) do
ordered_block_hashes = {transactions_hashes, transactions_block_hashes} =
transactions changes_list
|> Enum.filter(fn %{block_hash: block_hash, old_block_hash: old_block_hash} -> |> Enum.filter(&Map.has_key?(&1, :block_hash))
not is_nil(old_block_hash) and block_hash != old_block_hash |> Enum.map(fn %{hash: hash, block_hash: block_hash} ->
{:ok, hash_bytes} = Hash.Full.dump(hash)
{:ok, block_hash_bytes} = Hash.Full.dump(block_hash)
{hash_bytes, block_hash_bytes}
end) end)
|> MapSet.new(& &1.old_block_hash) |> Enum.unzip()
|> Enum.sort()
blocks_with_recollated_transactions =
if Enum.empty?(ordered_block_hashes) do from(
transaction in Transaction,
join:
new_transaction in fragment(
"(SELECT unnest(?::bytea[]) as hash, unnest(?::bytea[]) as block_hash)",
^transactions_hashes,
^transactions_block_hashes
),
on: transaction.hash == new_transaction.hash,
where: transaction.block_hash != new_transaction.block_hash,
select: transaction.block_hash
)
block_hashes =
blocks_with_recollated_transactions
|> repo.all()
|> Enum.uniq()
if Enum.empty?(block_hashes) do
{:ok, []} {:ok, []}
else else
query = query =
from( from(
block in Block, block in Block,
where: block.hash in ^ordered_block_hashes, where: block.hash in ^block_hashes,
update: [ # Enforce Block ShareLocks order (see docs: sharelocks.md)
set: [ order_by: [asc: block.hash],
consensus: false, lock: "FOR UPDATE"
updated_at: ^updated_at
]
]
) )
try do try do
{_, result} = repo.update_all(query, [], timeout: timeout) {_, result} =
repo.update_all(
from(b in Block, join: s in subquery(query), on: b.hash == s.hash),
[set: [consensus: false, updated_at: updated_at]],
timeout: timeout
)
{:ok, result} {:ok, result}
rescue rescue
postgrex_error in Postgrex.Error -> postgrex_error in Postgrex.Error ->
{:error, %{exception: postgrex_error, block_hashes: ordered_block_hashes}} {:error, %{exception: postgrex_error, block_hashes: block_hashes}}
end end
end end
end end
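The reworked `discard_blocks_for_recollated_transactions/3` compares the incoming transactions (pending ones without a `block_hash` are skipped) against what is already stored: wherever the stored `block_hash` differs from the incoming one, the old block is marked non-consensus so it will be refetched. A toy, in-memory analogue of that predicate, with shortened hashes for readability:

    stored = %{"0xaaa" => "block_1", "0xbbb" => "block_2"}

    incoming = [
      %{hash: "0xaaa", block_hash: "block_9"},
      %{hash: "0xbbb", block_hash: "block_2"}
    ]

    blocks_to_discard =
      incoming
      |> Enum.filter(fn %{hash: hash, block_hash: new_block} -> stored[hash] != new_block end)
      |> Enum.map(fn %{hash: hash} -> stored[hash] end)
      |> Enum.uniq()

    #=> ["block_1"], the block that previously held 0xaaa loses consensus and is refetched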

@ -47,4 +47,24 @@ defmodule Explorer.Chain.Import.Stage do
runner.run(Multi.new(), changes_chunk, options) runner.run(Multi.new(), changes_chunk, options)
end) end)
end end
@spec single_multi([Runner.t()], runner_to_changes_list, %{optional(atom()) => term()}) ::
{Multi.t(), runner_to_changes_list}
def single_multi(runners, runner_to_changes_list, options) do
runners
|> Enum.reduce({Multi.new(), runner_to_changes_list}, fn runner, {multi, remaining_runner_to_changes_list} ->
{changes_list, new_remaining_runner_to_changes_list} = Map.pop(remaining_runner_to_changes_list, runner)
new_multi =
case changes_list do
nil ->
multi
_ ->
runner.run(multi, changes_list, options)
end
{new_multi, new_remaining_runner_to_changes_list}
end)
end
end end
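`single_multi/3` folds every runner of a stage into one `Ecto.Multi`, so the whole stage commits or rolls back as a unit, and it hands back whatever change lists it did not consume so later stages can claim them. An illustrative call with hypothetical runners (not part of this changeset):

    # RunnerA is part of this stage, RunnerB is not.
    runner_to_changes_list = %{RunnerA => [changes_a], RunnerB => [changes_b]}

    {multi, remaining} = Stage.single_multi([RunnerA], runner_to_changes_list, options)

    # `multi` now contains only RunnerA's steps, while
    # `remaining == %{RunnerB => [changes_b]}` is left for a later stage.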

@ -4,7 +4,6 @@ defmodule Explorer.Chain.Import.Stage.AddressReferencing do
`Explorer.Chain.Import.Stage.Addresses`. `Explorer.Chain.Import.Stage.Addresses`.
""" """
alias Ecto.Multi
alias Explorer.Chain.Import.{Runner, Stage} alias Explorer.Chain.Import.{Runner, Stage}
@behaviour Stage @behaviour Stage
@ -14,17 +13,6 @@ defmodule Explorer.Chain.Import.Stage.AddressReferencing do
do: [ do: [
Runner.Address.CoinBalances, Runner.Address.CoinBalances,
Runner.Blocks, Runner.Blocks,
Runner.Block.Rewards,
Runner.Block.SecondDegreeRelations,
Runner.Transactions,
Runner.Transaction.Forks,
Runner.InternalTransactions,
Runner.InternalTransactionsIndexedAtBlocks,
Runner.Logs,
Runner.Tokens,
Runner.TokenTransfers,
Runner.Address.CurrentTokenBalances,
Runner.Address.TokenBalances,
Runner.StakingPools, Runner.StakingPools,
Runner.StakingPoolsDelegators Runner.StakingPoolsDelegators
] ]
@ -32,21 +20,7 @@ defmodule Explorer.Chain.Import.Stage.AddressReferencing do
@impl Stage @impl Stage
def multis(runner_to_changes_list, options) do def multis(runner_to_changes_list, options) do
{final_multi, final_remaining_runner_to_changes_list} = {final_multi, final_remaining_runner_to_changes_list} =
runners() Stage.single_multi(runners(), runner_to_changes_list, options)
|> Enum.reduce({Multi.new(), runner_to_changes_list}, fn runner, {multi, remaining_runner_to_changes_list} ->
{changes_list, new_remaining_runner_to_changes_list} = Map.pop(remaining_runner_to_changes_list, runner)
new_multi =
case changes_list do
nil ->
multi
_ ->
runner.run(multi, changes_list, options)
end
{new_multi, new_remaining_runner_to_changes_list}
end)
{[final_multi], final_remaining_runner_to_changes_list} {[final_multi], final_remaining_runner_to_changes_list}
end end

@ -0,0 +1,30 @@
defmodule Explorer.Chain.Import.Stage.BlockFollowing do
@moduledoc """
Imports any tables that follow and cannot be imported at the same time as
those imported by `Explorer.Chain.Import.Stage.Addresses`,
`Explorer.Chain.Import.Stage.AddressReferencing` and
`Explorer.Chain.Import.Stage.BlockReferencing`
"""
alias Explorer.Chain.Import.{Runner, Stage}
@behaviour Stage
@impl Stage
def runners,
do: [
Runner.InternalTransactionsIndexedAtBlocks,
Runner.Block.SecondDegreeRelations,
Runner.Block.Rewards,
Runner.InternalTransactions,
Runner.Address.CurrentTokenBalances
]
@impl Stage
def multis(runner_to_changes_list, options) do
{final_multi, final_remaining_runner_to_changes_list} =
Stage.single_multi(runners(), runner_to_changes_list, options)
{[final_multi], final_remaining_runner_to_changes_list}
end
end

@ -0,0 +1,30 @@
defmodule Explorer.Chain.Import.Stage.BlockReferencing do
@moduledoc """
Imports any tables that reference `t:Explorer.Chain.Block.t/0`; the blocks
themselves were already imported by `Explorer.Chain.Import.Stage.Addresses` and
`Explorer.Chain.Import.Stage.AddressReferencing`.
"""
alias Explorer.Chain.Import.{Runner, Stage}
@behaviour Stage
@impl Stage
def runners,
do: [
Runner.Transactions,
Runner.Transaction.Forks,
Runner.Logs,
Runner.Tokens,
Runner.TokenTransfers,
Runner.Address.TokenBalances
]
@impl Stage
def multis(runner_to_changes_list, options) do
{final_multi, final_remaining_runner_to_changes_list} =
Stage.single_multi(runners(), runner_to_changes_list, options)
{[final_multi], final_remaining_runner_to_changes_list}
end
end
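Taken together, the import pipeline is now split into ordered stages. The stage list itself lives in `Explorer.Chain.Import` (touched by this changeset but not shown here); based on the moduledocs above it presumably ends up in roughly this order, which is an inference rather than a quote from the diff:

    # Assumed ordering, inferred from the stage moduledocs:
    @stages [
      Import.Stage.Addresses,
      Import.Stage.AddressReferencing,
      Import.Stage.BlockReferencing,
      Import.Stage.BlockFollowing
    ]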

@ -12,6 +12,8 @@ defmodule Explorer.ChainSpec.Parity.Importer do
alias Explorer.ChainSpec.GenesisData alias Explorer.ChainSpec.GenesisData
alias Explorer.ChainSpec.POA.Importer, as: PoaEmissionImporter alias Explorer.ChainSpec.POA.Importer, as: PoaEmissionImporter
import Ecto.Query
@max_block_number :infinity @max_block_number :infinity
def import_emission_rewards(chain_spec) do def import_emission_rewards(chain_spec) do
@ -46,8 +48,27 @@ defmodule Explorer.ChainSpec.Parity.Importer do
defp import_rewards_from_chain_spec(chain_spec) do defp import_rewards_from_chain_spec(chain_spec) do
rewards = emission_rewards(chain_spec) rewards = emission_rewards(chain_spec)
{_, nil} = Repo.delete_all(EmissionReward) inner_delete_query =
{_, nil} = Repo.insert_all(EmissionReward, rewards) from(
emission_reward in EmissionReward,
# Enforce EmissionReward ShareLocks order (see docs: sharelocks.md)
order_by: emission_reward.block_range,
lock: "FOR UPDATE"
)
delete_query =
from(
e in EmissionReward,
join: s in subquery(inner_delete_query),
# we join on reward because it's faster and we have to delete them all anyway
on: e.reward == s.reward
)
# Enforce EmissionReward ShareLocks order (see docs: sharelocks.md)
ordered_rewards = Enum.sort_by(rewards, & &1.block_range)
{_, nil} = Repo.delete_all(delete_query)
{_, nil} = Repo.insert_all(EmissionReward, ordered_rewards)
end end
def genesis_coin_balances(chain_spec) do def genesis_coin_balances(chain_spec) do

@ -11,6 +11,8 @@ defmodule Explorer.ChainSpec.POA.Importer do
alias Explorer.Chain.Block.{EmissionReward, Range} alias Explorer.Chain.Block.{EmissionReward, Range}
alias Explorer.ChainSpec.GenesisData alias Explorer.ChainSpec.GenesisData
import Ecto.Query
@block_reward_amount_abi %{ @block_reward_amount_abi %{
"type" => "function", "type" => "function",
"stateMutability" => "view", "stateMutability" => "view",
@ -51,8 +53,27 @@ defmodule Explorer.ChainSpec.POA.Importer do
} }
] ]
{_, nil} = Repo.delete_all(EmissionReward) inner_delete_query =
{_, nil} = Repo.insert_all(EmissionReward, rewards) from(
emission_reward in EmissionReward,
# Enforce EmissionReward ShareLocks order (see docs: sharelocks.md)
order_by: emission_reward.block_range,
lock: "FOR UPDATE"
)
delete_query =
from(
e in EmissionReward,
join: s in subquery(inner_delete_query),
# we join on reward because it's faster and we have to delete them all anyway
on: e.reward == s.reward
)
# Enforce EmissionReward ShareLocks order (see docs: sharelocks.md)
ordered_rewards = Enum.sort_by(rewards, & &1.block_range)
{_, nil} = Repo.delete_all(delete_query)
{_, nil} = Repo.insert_all(EmissionReward, ordered_rewards)
end end
end end

@ -0,0 +1,125 @@
defmodule Explorer.Counters.AddressesCounter do
@moduledoc """
Caches the number of all addresses.
It loads the count asynchronously and refreshes it every 30 minutes.
"""
use GenServer
alias Explorer.Chain
@table :addresses_counter
@cache_key "addresses"
def table_name do
@table
end
def cache_key do
@cache_key
end
# It is undesirable to automatically start the consolidation in all environments.
# Consider the test environment: if the consolidation initiates but does not
# finish before a test ends, that test will fail. Hundreds of tests were
# failing for exactly this reason before the consolidation and the scheduler
# were disabled in the test env.
config = Application.get_env(:explorer, Explorer.Counters.AddressesCounter)
@enable_consolidation Keyword.get(config, :enable_consolidation)
@update_interval_in_seconds Keyword.get(config, :update_interval_in_seconds)
@doc """
Starts a process to periodically update the addresses counter.
"""
@spec start_link(term()) :: GenServer.on_start()
def start_link(_) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
@impl true
def init(_args) do
create_table()
{:ok, %{consolidate?: enable_consolidation?()}, {:continue, :ok}}
end
def create_table do
opts = [
:set,
:named_table,
:public,
read_concurrency: true
]
:ets.new(table_name(), opts)
end
defp schedule_next_consolidation do
Process.send_after(self(), :consolidate, :timer.seconds(@update_interval_in_seconds))
end
@doc """
Inserts new items into the `:ets` table.
"""
def insert_counter({key, info}) do
:ets.insert(table_name(), {key, info})
end
@impl true
def handle_continue(:ok, %{consolidate?: true} = state) do
consolidate()
schedule_next_consolidation()
{:noreply, state}
end
@impl true
def handle_continue(:ok, state) do
{:noreply, state}
end
@impl true
def handle_info(:consolidate, state) do
consolidate()
schedule_next_consolidation()
{:noreply, state}
end
@doc """
Fetches the cached address count from the `:ets` table.
"""
def fetch do
do_fetch(:ets.lookup(table_name(), cache_key()))
end
defp do_fetch([{_, result}]), do: result
defp do_fetch([]), do: 0
@doc """
Consolidates the info by populating the `:ets` table with the current database information.
"""
def consolidate do
counter = Chain.count_addresses()
insert_counter({cache_key(), counter})
end
@doc """
Returns a boolean that indicates whether consolidation is enabled.
In order to choose whether or not to enable the scheduler and the initial
consolidation, change the following Explorer config:
`config :explorer, Explorer.Counters.AddressesCounter, enable_consolidation: true`
to:
`config :explorer, Explorer.Counters.AddressesCounter, enable_consolidation: false`
"""
def enable_consolidation?, do: @enable_consolidation
end
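Putting the pieces together, the counter is driven by application config plus the public `fetch/0` and `consolidate/0` calls. A usage sketch; the interval value is illustrative, while the config keys are the ones read in the module above:

    # config/config.exs
    config :explorer, Explorer.Counters.AddressesCounter,
      enable_consolidation: true,
      update_interval_in_seconds: 30 * 60

    # Started under the application's supervision tree:
    children = [Explorer.Counters.AddressesCounter]

    # Reading the cached value (0 until the first consolidation has run):
    Explorer.Counters.AddressesCounter.fetch()

    # Forcing a refresh outside the schedule, e.g. in tests:
    Explorer.Counters.AddressesCounter.consolidate()
    Explorer.Counters.AddressesCounter.fetch()
    #=> the current address count from the database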

@ -26,7 +26,7 @@ defmodule Explorer.ExchangeRates.Source.CoinGecko do
[ [
%Token{ %Token{
available_supply: to_decimal(market_data["circulating_supply"]), available_supply: to_decimal(market_data["circulating_supply"]),
total_supply: to_decimal(market_data["total_supply"]), total_supply: to_decimal(market_data["total_supply"]) || to_decimal(market_data["circulating_supply"]),
btc_value: btc_value, btc_value: btc_value,
id: json_data["id"], id: json_data["id"],
last_updated: last_updated, last_updated: last_updated,
@ -44,15 +44,41 @@ defmodule Explorer.ExchangeRates.Source.CoinGecko do
@impl Source @impl Source
def source_url do def source_url do
"#{base_url()}/coins/#{coin_id()}" {:ok, id} = coin_id()
"#{base_url()}/coins/#{id}"
end end
defp base_url do defp base_url do
config(:base_url) || "https://api.coingecko.com/api/v3" config(:base_url) || "https://api.coingecko.com/api/v3"
end end
defp coin_id do def coin_id do
Application.get_env(:explorer, __MODULE__)[:coin_id] url = "#{base_url()}/coins/list"
symbol = String.downcase(Explorer.coin())
case HTTPoison.get(url, headers()) do
{:ok, %Response{body: body, status_code: 200}} ->
data = decode_json(body)
symbol_data =
Enum.find(data, fn item ->
item["symbol"] == symbol
end)
if symbol_data do
{:ok, symbol_data["id"]}
else
{:error, :not_found}
end
{:ok, %Response{body: body, status_code: status_code}} when status_code in 400..499 ->
{:error, decode_json(body)["error"]}
{:error, %Error{reason: reason}} ->
{:error, reason}
end
end end
defp get_btc_price(currency \\ "usd") do defp get_btc_price(currency \\ "usd") do
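With this change the CoinGecko id is resolved at call time from the configured coin symbol instead of a static `:coin_id` config entry. Assuming the default `config :explorer, coin: "POA"` and a reachable API, the flow looks like:

    # coin_id/0 downcases the configured symbol and looks it up in /coins/list.
    iex> Explorer.ExchangeRates.Source.CoinGecko.coin_id()
    {:ok, "poa-network"}

    # source_url/0 then builds the per-coin endpoint from that id.
    iex> Explorer.ExchangeRates.Source.CoinGecko.source_url()
    "https://api.coingecko.com/api/v3/coins/poa-network"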

@ -41,9 +41,12 @@ defmodule Explorer.Market do
@doc false @doc false
def bulk_insert_history(records) do def bulk_insert_history(records) do
records_without_zeroes = records_without_zeroes =
Enum.reject(records, fn item -> records
|> Enum.reject(fn item ->
Decimal.equal?(item.closing_price, 0) && Decimal.equal?(item.opening_price, 0) Decimal.equal?(item.closing_price, 0) && Decimal.equal?(item.opening_price, 0)
end) end)
# Enforce MarketHistory ShareLocks order (see docs: sharelocks.md)
|> Enum.sort_by(& &1.date)
Repo.insert_all(MarketHistory, records_without_zeroes, on_conflict: :nothing, conflict_target: [:date]) Repo.insert_all(MarketHistory, records_without_zeroes, on_conflict: :nothing, conflict_target: [:date])
end end

@ -69,7 +69,8 @@ defmodule Explorer.SmartContract.Verifier do
blockchain_bytecode_without_whisper = extract_bytecode(blockchain_bytecode) blockchain_bytecode_without_whisper = extract_bytecode(blockchain_bytecode)
cond do cond do
generated_bytecode != blockchain_bytecode_without_whisper -> generated_bytecode != blockchain_bytecode_without_whisper &&
!try_library_verification(generated_bytecode, blockchain_bytecode_without_whisper) ->
{:error, :generated_bytecode} {:error, :generated_bytecode}
has_constructor_with_params?(abi) && has_constructor_with_params?(abi) &&
@ -81,6 +82,18 @@ defmodule Explorer.SmartContract.Verifier do
end end
end end
# 730000000000000000000000000000000000000000 - default library address placeholder emitted by the compiler
defp try_library_verification(
"730000000000000000000000000000000000000000" <> bytecode,
<<_address::binary-size(42)>> <> bytecode
) do
true
end
defp try_library_verification(_, _) do
false
end
@doc """ @doc """
In order to discover the bytecode we need to remove the `swarm source` from In order to discover the bytecode we need to remove the `swarm source` from
the hash. the hash.
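The library check above exploits how solc emits deployed library code: the first instruction is `PUSH20` (opcode `0x73`) followed by the library's own 20-byte address, which the compiler fills with an all-zero placeholder while the on-chain copy carries the real address. A small, runnable illustration of the comparison that `try_library_verification/2` performs (the hex strings are made up):

    rest = "6080604052600436106033576000357c0100000000"
    generated = "73" <> String.duplicate("0", 40) <> rest
    onchain = "73" <> "49f540c22cba15c47a08c235e20081474201a742" <> rest

    # The match succeeds exactly when the bytecode after the 42-character
    # PUSH20 prefix is identical on both sides:
    binary_part(generated, 42, byte_size(generated) - 42) ==
      binary_part(onchain, 42, byte_size(onchain) - 42)
    #=> true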

@ -8,7 +8,10 @@ defmodule Explorer.Validator.MetadataImporter do
import Ecto.Query, only: [from: 2] import Ecto.Query, only: [from: 2]
def import_metadata(metadata_maps) do def import_metadata(metadata_maps) do
Repo.transaction(fn -> Enum.each(metadata_maps, &upsert_validator_metadata(&1)) end) # Enforce Name ShareLocks order (see docs: sharelocks.md)
ordered_metadata_maps = Enum.sort_by(metadata_maps, &{&1.address_hash, &1.name})
Repo.transaction(fn -> Enum.each(ordered_metadata_maps, &upsert_validator_metadata(&1)) end)
end end
defp upsert_validator_metadata(validator_changeset) do defp upsert_validator_metadata(validator_changeset) do

@ -0,0 +1,7 @@
defmodule Explorer.Repo.Migrations.CreateIndexesForBlockNumberInTokenTransfersAndTransactions do
use Ecto.Migration
def change do
create_if_not_exists(index(:token_transfers, [:block_number]))
end
end

@ -0,0 +1,42 @@
defmodule Explorer.Chain.Cache.AccountsTest do
use Explorer.DataCase
alias Explorer.Chain.Cache.Accounts
alias Explorer.Repo
describe "drop/1" do
test "does not drop the cache if the address fetched_coin_balance has not changed" do
address =
insert(:address, fetched_coin_balance: 100_000, fetched_coin_balance_block_number: 1)
|> preload_names()
Accounts.update(address)
assert Accounts.take(1) == [address]
Accounts.drop(address)
assert Accounts.take(1) == [address]
end
test "drops the cache if an address was in the cache with a different fetched_coin_balance" do
address =
insert(:address, fetched_coin_balance: 100_000, fetched_coin_balance_block_number: 1)
|> preload_names()
Accounts.update(address)
assert Accounts.take(1) == [address]
updated_address = %{address | fetched_coin_balance: 100_001}
Accounts.drop(updated_address)
assert Accounts.take(1) == []
end
end
defp preload_names(address) do
Repo.preload(address, [:names])
end
end

@ -0,0 +1,69 @@
defmodule Explorer.Chain.Import.Runner.AddressesTest do
use Explorer.DataCase
alias Ecto.Multi
alias Explorer.Chain.{Address, Wei}
alias Explorer.Chain.Import.Runner.Addresses
alias Explorer.Repo
describe "run/1" do
test "does not update fetched_coin_balance if original value is not nil but new value is nil" do
block_number = 5
original_address = insert(:address, fetched_coin_balance: 5, fetched_coin_balance_block_number: block_number)
new_params = %{
fetched_coin_balance: nil,
fetched_coin_balance_block_number: block_number,
hash: to_string(original_address.hash)
}
changeset = Address.balance_changeset(%Address{}, new_params)
wei = original_address.fetched_coin_balance
assert {:ok,
%{
addresses: [
%Address{
fetched_coin_balance: ^wei,
fetched_coin_balance_block_number: 5
}
]
}} = run([changeset.changes])
end
test "updates fetched_coin_balance if original value is nil and new value is not nil" do
block_number = 5
original_address = insert(:address, fetched_coin_balance: nil, fetched_coin_balance_block_number: block_number)
new_params = %{
fetched_coin_balance: 5,
fetched_coin_balance_block_number: block_number,
hash: to_string(original_address.hash)
}
changeset = Address.balance_changeset(%Address{}, new_params)
wei = %Wei{value: Decimal.new(new_params.fetched_coin_balance)}
assert {:ok,
%{
addresses: [
%Address{
fetched_coin_balance: ^wei,
fetched_coin_balance_block_number: 5
}
]
}} = run([changeset.changes])
end
end
defp run(changes) do
timestamp = DateTime.utc_now()
options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}}
Multi.new()
|> Addresses.run(changes, options)
|> Repo.transaction()
end
end

@ -7,13 +7,13 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
alias Ecto.Multi alias Ecto.Multi
alias Explorer.Chain.Import.Runner.{Blocks, Transactions} alias Explorer.Chain.Import.Runner.{Blocks, Transactions}
alias Explorer.Chain.{Address, Block, Transaction} alias Explorer.Chain.{Address, Block, InternalTransaction, Log, Transaction, TokenTransfer}
alias Explorer.Chain alias Explorer.{Chain, Repo}
alias Explorer.Repo
describe "run/1" do describe "run/1" do
setup do setup do
block = insert(:block, consensus: true) miner = insert(:address)
block = params_for(:block, consensus: true, miner_hash: miner.hash)
timestamp = DateTime.utc_now() timestamp = DateTime.utc_now()
options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}}
@ -22,9 +22,11 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "derive_transaction_forks replaces hash on conflicting (uncle_hash, index)", %{ test "derive_transaction_forks replaces hash on conflicting (uncle_hash, index)", %{
consensus_block: %Block{hash: block_hash, miner_hash: miner_hash, number: block_number} = consensus_block, consensus_block: %{hash: block_hash, miner_hash: miner_hash, number: block_number},
options: options options: options
} do } do
consensus_block = insert(:block, %{hash: block_hash, number: block_number})
transaction = transaction =
:transaction :transaction
|> insert() |> insert()
@ -81,7 +83,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "delete_address_current_token_balances deletes rows with matching block number when consensus is true", test "delete_address_current_token_balances deletes rows with matching block number when consensus is true",
%{consensus_block: %Block{number: block_number} = block, options: options} do %{consensus_block: %{number: block_number} = block, options: options} do
%Address.CurrentTokenBalance{address_hash: address_hash, token_contract_address_hash: token_contract_address_hash} = %Address.CurrentTokenBalance{address_hash: address_hash, token_contract_address_hash: token_contract_address_hash} =
insert(:address_current_token_balance, block_number: block_number) insert(:address_current_token_balance, block_number: block_number)
@ -98,7 +100,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "delete_address_current_token_balances does not delete rows with matching block number when consensus is false", test "delete_address_current_token_balances does not delete rows with matching block number when consensus is false",
%{consensus_block: %Block{number: block_number} = block, options: options} do %{consensus_block: %{number: block_number} = block, options: options} do
%Address.CurrentTokenBalance{} = insert(:address_current_token_balance, block_number: block_number) %Address.CurrentTokenBalance{} = insert(:address_current_token_balance, block_number: block_number)
count = 1 count = 1
@ -113,8 +115,80 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
assert count(Address.CurrentTokenBalance) == count assert count(Address.CurrentTokenBalance) == count
end end
test "remove_nonconsensus_token_transfers deletes token transfer rows with matching block number when new consensus block is inserted",
%{consensus_block: %{number: block_number} = block, options: options} do
consensus_block = insert(:block, number: block_number, consensus: true)
transaction = insert(:transaction) |> with_block(consensus_block)
%TokenTransfer{transaction_hash: transaction_hash, log_index: log_index} =
insert(:token_transfer, block_number: block_number, transaction: transaction)
assert count(TokenTransfer) == 1
assert {:ok,
%{
remove_nonconsensus_token_transfers: [
%{transaction_hash: ^transaction_hash, log_index: ^log_index}
]
}} = run_block_consensus_change(block, true, options)
assert count(TokenTransfer) == 0
end
test "remove_nonconsensus_token_transfers does not delete token transfer rows with matching block number when new consensus block wasn't inserted",
%{consensus_block: %{number: block_number} = block, options: options} do
consensus_block = insert(:block, number: block_number, consensus: true)
transaction = insert(:transaction) |> with_block(consensus_block)
insert(:token_transfer, block_number: block_number, transaction: transaction)
count = 1
assert count(TokenTransfer) == count
assert {:ok, %{remove_nonconsensus_token_transfers: []}} = run_block_consensus_change(block, false, options)
assert count(TokenTransfer) == count
end
test "remove_nonconsensus_logs deletes nonconsensus logs", %{
consensus_block: %{number: block_number} = block,
options: options
} do
old_block = insert(:block, number: block_number, consensus: true)
forked_transaction = :transaction |> insert() |> with_block(old_block)
%Log{transaction_hash: hash, index: index} = insert(:log, transaction: forked_transaction)
assert count(Log) == 1
assert {:ok, %{remove_nonconsensus_logs: [%{transaction_hash: ^hash, index: ^index}]}} =
run_block_consensus_change(block, true, options)
assert count(Log) == 0
end
test "remove_nonconsensus_internal_transactions deletes nonconsensus internal transactions", %{
consensus_block: %{number: block_number} = block,
options: options
} do
old_block = insert(:block, number: block_number, consensus: true)
forked_transaction = :transaction |> insert() |> with_block(old_block)
%InternalTransaction{index: index, transaction_hash: hash} =
insert(:internal_transaction, index: 0, transaction: forked_transaction)
assert count(InternalTransaction) == 1
assert {:ok, %{remove_nonconsensus_internal_transactions: [%{transaction_hash: ^hash, index: ^index}]}} =
run_block_consensus_change(block, true, options)
assert count(InternalTransaction) == 0
end
test "derive_address_current_token_balances inserts rows if there is an address_token_balance left for the rows deleted by delete_address_current_token_balances", test "derive_address_current_token_balances inserts rows if there is an address_token_balance left for the rows deleted by delete_address_current_token_balances",
%{consensus_block: %Block{number: block_number} = block, options: options} do %{consensus_block: %{number: block_number} = block, options: options} do
token = insert(:token) token = insert(:token)
token_contract_address_hash = token.contract_address_hash token_contract_address_hash = token.contract_address_hash
@ -172,7 +246,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "a non-holder reverting to a holder increases the holder_count", test "a non-holder reverting to a holder increases the holder_count",
%{consensus_block: %Block{hash: block_hash, miner_hash: miner_hash, number: block_number}, options: options} do %{consensus_block: %{hash: block_hash, miner_hash: miner_hash, number: block_number}, options: options} do
token = insert(:token) token = insert(:token)
token_contract_address_hash = token.contract_address_hash token_contract_address_hash = token.contract_address_hash
@ -204,7 +278,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "a holder reverting to a non-holder decreases the holder_count", test "a holder reverting to a non-holder decreases the holder_count",
%{consensus_block: %Block{hash: block_hash, miner_hash: miner_hash, number: block_number}, options: options} do %{consensus_block: %{hash: block_hash, miner_hash: miner_hash, number: block_number}, options: options} do
token = insert(:token) token = insert(:token)
token_contract_address_hash = token.contract_address_hash token_contract_address_hash = token.contract_address_hash
@ -236,7 +310,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
test "a non-holder becoming and a holder becoming while a holder becomes a non-holder cancels out and holder_count does not change", test "a non-holder becoming and a holder becoming while a holder becomes a non-holder cancels out and holder_count does not change",
%{consensus_block: %Block{number: block_number} = block, options: options} do %{consensus_block: %{number: block_number} = block, options: options} do
token = insert(:token) token = insert(:token)
token_contract_address_hash = token.contract_address_hash token_contract_address_hash = token.contract_address_hash
@ -262,7 +336,8 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
# Regression test for https://github.com/poanetwork/blockscout/issues/1644 # Regression test for https://github.com/poanetwork/blockscout/issues/1644
test "discards neighbouring blocks if they aren't related to the current one because of reorg and/or import timeout", test "discards neighbouring blocks if they aren't related to the current one because of reorg and/or import timeout",
%{consensus_block: %Block{number: block_number, hash: block_hash, miner_hash: miner_hash}, options: options} do %{consensus_block: %{number: block_number, hash: block_hash, miner_hash: miner_hash}, options: options} do
insert(:block, %{number: block_number, hash: block_hash})
old_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1) old_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1)
new_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1) new_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1)
@ -286,7 +361,8 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
# Regression test for https://github.com/poanetwork/blockscout/issues/1911 # Regression test for https://github.com/poanetwork/blockscout/issues/1911
test "forces block refetch if transaction is re-collated in a different block", test "forces block refetch if transaction is re-collated in a different block",
%{consensus_block: %Block{number: block_number, hash: block_hash, miner_hash: miner_hash}, options: options} do %{consensus_block: %{number: block_number, hash: block_hash, miner_hash: miner_hash}, options: options} do
insert(:block, %{number: block_number, hash: block_hash})
new_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1) new_block1 = params_for(:block, miner_hash: miner_hash, parent_hash: block_hash, number: block_number + 1)
new_block2 = params_for(:block, miner_hash: miner_hash, parent_hash: new_block1.hash, number: block_number + 2) new_block2 = params_for(:block, miner_hash: miner_hash, parent_hash: new_block1.hash, number: block_number + 2)
@ -365,7 +441,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do
end end
defp run_block_consensus_change( defp run_block_consensus_change(
%Block{hash: block_hash, miner_hash: miner_hash, number: block_number}, %{hash: block_hash, miner_hash: miner_hash, number: block_number},
consensus, consensus,
options options
) do ) do

@ -27,6 +27,7 @@ defmodule Explorer.ChainTest do
alias Explorer.Chain.Supply.ProofOfAuthority alias Explorer.Chain.Supply.ProofOfAuthority
alias Explorer.Counters.AddressesWithBalanceCounter alias Explorer.Counters.AddressesWithBalanceCounter
alias Explorer.Counters.AddressesCounter
doctest Explorer.Chain doctest Explorer.Chain
@ -50,6 +51,22 @@ defmodule Explorer.ChainTest do
end end
end end
describe "count_addresses_from_cache/0" do
test "returns the number of all addresses" do
insert(:address, fetched_coin_balance: 0)
insert(:address, fetched_coin_balance: 1)
insert(:address, fetched_coin_balance: 2)
start_supervised!(AddressesCounter)
AddressesCounter.consolidate()
addresses_with_balance = Chain.count_addresses_from_cache()
assert is_integer(addresses_with_balance)
assert addresses_with_balance == 3
end
end
describe "last_db_block_status/0" do describe "last_db_block_status/0" do
test "return no_blocks errors if db is empty" do test "return no_blocks errors if db is empty" do
assert {:error, :no_blocks} = Chain.last_db_block_status() assert {:error, :no_blocks} = Chain.last_db_block_status()
@ -3879,9 +3896,9 @@ defmodule Explorer.ChainTest do
address = insert(:address) address = insert(:address)
today = NaiveDateTime.utc_now() today = NaiveDateTime.utc_now()
noon = Timex.set(today, hour: 12) noon = Timex.set(today, hour: 12)
block = insert(:block, timestamp: noon) block = insert(:block, timestamp: noon, number: 50)
yesterday = Timex.shift(noon, days: -1) yesterday = Timex.shift(noon, days: -1)
block_one_day_ago = insert(:block, timestamp: yesterday) block_one_day_ago = insert(:block, timestamp: yesterday, number: 49)
insert(:fetched_balance, address_hash: address.hash, value: 1000, block_number: block.number) insert(:fetched_balance, address_hash: address.hash, value: 1000, block_number: block.number)
insert(:fetched_balance, address_hash: address.hash, value: 2000, block_number: block_one_day_ago.number) insert(:fetched_balance, address_hash: address.hash, value: 2000, block_number: block_one_day_ago.number)
@ -3908,6 +3925,22 @@ defmodule Explorer.ChainTest do
%{date: today |> NaiveDateTime.to_date() |> Date.to_string(), value: Decimal.new("1E-15")} %{date: today |> NaiveDateTime.to_date() |> Date.to_string(), value: Decimal.new("1E-15")}
] ]
end end
test "uses last block value if there a couple of change in the same day" do
address = insert(:address)
today = NaiveDateTime.utc_now()
past = Timex.shift(today, hours: -1)
block_now = insert(:block, timestamp: today, number: 1)
insert(:fetched_balance, address_hash: address.hash, value: 1, block_number: block_now.number)
block_past = insert(:block, timestamp: past, number: 2)
insert(:fetched_balance, address_hash: address.hash, value: 0, block_number: block_past.number)
[balance] = Chain.address_to_balances_by_day(address.hash)
assert balance.value == Decimal.new(0)
end
end end
describe "block_combined_rewards/1" do describe "block_combined_rewards/1" do

@ -0,0 +1,16 @@
defmodule Explorer.Counters.AddressesCounterTest do
use Explorer.DataCase
alias Explorer.Counters.AddressesCounter
test "populates the cache with the number of all addresses" do
insert(:address, fetched_coin_balance: 0)
insert(:address, fetched_coin_balance: 1)
insert(:address, fetched_coin_balance: 2)
start_supervised!(AddressesCounter)
AddressesCounter.consolidate()
assert AddressesCounter.fetch() == 3
end
end

@ -18,6 +18,46 @@ defmodule Explorer.ExchangeRates.Source.CoinGeckoTest do
} }
""" """
@coins_list """
[
{
"id": "poa-network",
"symbol": "poa",
"name": "POA Network"
},
{
"id": "poc-chain",
"symbol": "pocc",
"name": "POC Chain"
},
{
"id": "pocket-arena",
"symbol": "poc",
"name": "Pocket Arena"
},
{
"id": "ethereum",
"symbol": "eth",
"name": "Ethereum"
},
{
"id": "rootstock",
"symbol": "rbtc",
"name": "Rootstock RSK"
},
{
"id": "dai",
"symbol": "dai",
"name": "Dai"
},
{
"id": "callisto",
"symbol": "clo",
"name": "Callisto Network"
}
]
"""
describe "format_data/1" do describe "format_data/1" do
setup do setup do
bypass = Bypass.open() bypass = Bypass.open()
@ -62,4 +102,65 @@ defmodule Explorer.ExchangeRates.Source.CoinGeckoTest do
assert [] = CoinGecko.format_data(bad_data) assert [] = CoinGecko.format_data(bad_data)
end end
end end
describe "coin_id/0" do
setup do
bypass = Bypass.open()
Application.put_env(:explorer, CoinGecko, base_url: "http://localhost:#{bypass.port}")
on_exit(fn ->
Application.put_env(:explorer, :coin, "POA")
end)
{:ok, bypass: bypass}
end
test "fetches poa coin id by default", %{bypass: bypass} do
Bypass.expect(bypass, "GET", "/coins/list", fn conn ->
Conn.resp(conn, 200, @coins_list)
end)
assert CoinGecko.coin_id() == {:ok, "poa-network"}
end
test "fetches eth coin id", %{bypass: bypass} do
Application.put_env(:explorer, :coin, "ETH")
Bypass.expect(bypass, "GET", "/coins/list", fn conn ->
Conn.resp(conn, 200, @coins_list)
end)
assert CoinGecko.coin_id() == {:ok, "ethereum"}
end
test "fetches rbtc coin id", %{bypass: bypass} do
Application.put_env(:explorer, :coin, "RBTC")
Bypass.expect(bypass, "GET", "/coins/list", fn conn ->
Conn.resp(conn, 200, @coins_list)
end)
assert CoinGecko.coin_id() == {:ok, "rootstock"}
end
test "fetches dai coin id", %{bypass: bypass} do
Application.put_env(:explorer, :coin, "DAI")
Bypass.expect(bypass, "GET", "/coins/list", fn conn ->
Conn.resp(conn, 200, @coins_list)
end)
assert CoinGecko.coin_id() == {:ok, "dai"}
end
test "fetches callisto coin id", %{bypass: bypass} do
Application.put_env(:explorer, :coin, "CLO")
Bypass.expect(bypass, "GET", "/coins/list", fn conn ->
Conn.resp(conn, 200, @coins_list)
end)
assert CoinGecko.coin_id() == {:ok, "callisto"}
end
end
end end

@ -144,6 +144,34 @@ defmodule Explorer.SmartContract.VerifierTest do
assert abi != nil assert abi != nil
end end
test "verifies a library" do
bytecode =
"0x7349f540c22cba15c47a08c235e20081474201a742301460806040526004361060335760003560e01c8063c2985578146038575b600080fd5b603e60b0565b6040805160208082528351818301528351919283929083019185019080838360005b8381101560765781810151838201526020016060565b50505050905090810190601f16801560a25780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b604080518082019091526003815262666f6f60e81b60208201529056fea265627a7a72315820174b282a3ef3b9778d79fbc2e4c36bc939c54dfaaaa51d3122ee6e648093844c64736f6c634300050b0032"
contract_address = insert(:contract_address, contract_code: bytecode)
code = """
pragma solidity 0.5.11;
library Foo {
function foo() external pure returns (string memory) {
return "foo";
}
}
"""
params = %{
"contract_source_code" => code,
"compiler_version" => "v0.5.11+commit.c082d0b4",
"evm_version" => "default",
"name" => "Foo",
"optimization" => true
}
assert {:ok, %{abi: abi}} = Verifier.evaluate_authenticity(contract_address.hash, params)
assert abi != nil
end
test "verifies smart contract compiled with Solidity 0.5.9 (includes new metadata in bytecode) with constructor args" do test "verifies smart contract compiled with Solidity 0.5.9 (includes new metadata in bytecode) with constructor args" do
path = File.cwd!() <> "/test/support/fixture/smart_contract/solidity_0.5.9_smart_contract.sol" path = File.cwd!() <> "/test/support/fixture/smart_contract/solidity_0.5.9_smart_contract.sol"
contract = File.read!(path) contract = File.read!(path)

@ -45,6 +45,8 @@ defmodule Explorer.DataCase do
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id())
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id())
Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id())
:ok
end end

@ -13,7 +13,7 @@ defmodule Indexer.Block.Fetcher do
alias Explorer.Chain
alias Explorer.Chain.{Address, Block, Hash, Import, Transaction}
alias Explorer.Chain.Cache.Blocks, as: BlocksCache
alias Explorer.Chain.Cache.{BlockNumber, Transactions} alias Explorer.Chain.Cache.{Accounts, BlockNumber, Transactions}
alias Indexer.Block.Fetcher.Receipts
alias Indexer.Fetcher.{
@ -176,6 +176,7 @@ defmodule Indexer.Block.Fetcher do
result = {:ok, %{inserted: inserted, errors: blocks_errors}}
update_block_cache(inserted[:blocks])
update_transactions_cache(inserted[:transactions])
update_addresses_cache(inserted[:addresses])
result
else
{step, {:error, reason}} -> {:error, {step, reason}}
@ -183,6 +184,8 @@ defmodule Indexer.Block.Fetcher do
end
end
defp update_block_cache([]), do: :ok
defp update_block_cache(blocks) when is_list(blocks) do
{min_block, max_block} = Enum.min_max_by(blocks, & &1.number)
@ -197,6 +200,8 @@ defmodule Indexer.Block.Fetcher do
Transactions.update(transactions)
end
defp update_addresses_cache(addresses), do: Accounts.drop(addresses)
def import(
%__MODULE__{broadcast: broadcast, callback_module: callback_module} = state,
options

@ -27,6 +27,7 @@ defmodule Indexer.Block.Realtime.Fetcher do
alias Ecto.Changeset
alias EthereumJSONRPC.{FetchedBalances, Subscription}
alias Explorer.Chain
alias Explorer.Chain.Cache.Accounts
alias Explorer.Counters.AverageBlockTime
alias Indexer.{Block, Tracer}
alias Indexer.Block.Realtime.TaskSupervisor
@ -197,6 +198,8 @@ defmodule Indexer.Block.Realtime.Fetcher do
json_rpc_named_arguments
)
Accounts.drop(imported[:addresses])
ok
end
end

@ -17,6 +17,7 @@ defmodule Indexer.Fetcher.BlockReward do
alias EthereumJSONRPC.FetchedBeneficiaries
alias Explorer.Chain
alias Explorer.Chain.{Block, Wei}
alias Explorer.Chain.Cache.Accounts
alias Indexer.{BufferedTask, Tracer}
alias Indexer.Fetcher.BlockReward.Supervisor, as: BlockRewardSupervisor
alias Indexer.Fetcher.CoinBalance
@ -130,7 +131,9 @@ defmodule Indexer.Fetcher.BlockReward do
|> add_gas_payments()
|> import_block_reward_params()
|> case do
{:ok, %{address_coin_balances: address_coin_balances}} -> {:ok, %{address_coin_balances: address_coin_balances, addresses: addresses}} ->
Accounts.drop(addresses)
CoinBalance.async_fetch_balances(address_coin_balances)
retry_errors(errors)

@ -14,6 +14,7 @@ defmodule Indexer.Fetcher.CoinBalance do
alias EthereumJSONRPC.FetchedBalances
alias Explorer.Chain
alias Explorer.Chain.{Block, Hash}
alias Explorer.Chain.Cache.Accounts
alias Indexer.{BufferedTask, Tracer}
@behaviour BufferedTask
@ -136,7 +137,9 @@ defmodule Indexer.Fetcher.CoinBalance do
end
defp run_fetched_balances(%FetchedBalances{errors: errors} = fetched_balances, _) do
{:ok, _} = import_fetched_balances(fetched_balances) {:ok, imported} = import_fetched_balances(fetched_balances)
Accounts.drop(imported[:addresses])
retry(errors)
end

@ -19,7 +19,7 @@ defmodule Indexer.Fetcher.CoinBalanceOnDemand do
alias Explorer.{Chain, Repo}
alias Explorer.Chain.Address
alias Explorer.Chain.Address.CoinBalance
alias Explorer.Chain.Cache.BlockNumber alias Explorer.Chain.Cache.{Accounts, BlockNumber}
alias Explorer.Counters.AverageBlockTime
alias Indexer.Fetcher.CoinBalance, as: CoinBalanceFetcher
alias Timex.Duration
@ -71,7 +71,11 @@ defmodule Indexer.Fetcher.CoinBalanceOnDemand do
end
def handle_cast({:fetch_and_update, block_number, address}, state) do
fetch_and_update(block_number, address, state.json_rpc_named_arguments) result = fetch_and_update(block_number, address, state.json_rpc_named_arguments)
with {:ok, %{addresses: addresses}} <- result do
Accounts.drop(addresses)
end
{:noreply, state}
end

@ -12,6 +12,7 @@ defmodule Indexer.Fetcher.ContractCode do
alias Explorer.Chain
alias Explorer.Chain.{Block, Hash}
alias Explorer.Chain.Cache.Accounts
alias Indexer.{BufferedTask, Tracer}
alias Indexer.Transform.Addresses
@ -126,7 +127,8 @@ defmodule Indexer.Fetcher.ContractCode do
addresses: %{params: merged_addresses_params},
timeout: :infinity
}) do
{:ok, _} -> {:ok, imported} ->
Accounts.drop(imported[:addresses])
:ok
{:error, step, reason, _changes_so_far} ->

@ -14,6 +14,7 @@ defmodule Indexer.Fetcher.InternalTransaction do
alias Explorer.Chain
alias Explorer.Chain.{Block, Hash}
alias Explorer.Chain.Cache.Accounts
alias Indexer.{BufferedTask, Tracer}
alias Indexer.Transform.Addresses
@ -218,6 +219,8 @@ defmodule Indexer.Fetcher.InternalTransaction do
case imports do
{:ok, imported} ->
Accounts.drop(imported[:addresses])
async_import_coin_balances(imported, %{
address_hash_to_fetched_balance_block_number: address_hash_to_block_number
})

@ -14,6 +14,7 @@ defmodule Indexer.Fetcher.PendingTransaction do
alias Ecto.Changeset
alias Explorer.Chain
alias Explorer.Chain.Cache.Accounts
alias Indexer.Fetcher.PendingTransaction
alias Indexer.Transform.Addresses
@ -148,7 +149,8 @@ defmodule Indexer.Fetcher.PendingTransaction do
broadcast: :realtime,
transactions: %{params: transactions_params, on_conflict: :nothing}
}) do
{:ok, _} -> {:ok, imported} ->
Accounts.drop(imported[:addresses])
:ok
{:error, [%Changeset{} | _] = changesets} ->

@ -12,6 +12,7 @@ defmodule Indexer.Fetcher.UncleBlock do
alias Ecto.Changeset
alias EthereumJSONRPC.Blocks
alias Explorer.Chain
alias Explorer.Chain.Cache.Accounts
alias Explorer.Chain.Hash
alias Indexer.{Block, BufferedTask, Tracer}
alias Indexer.Fetcher.UncleBlock
@ -126,7 +127,8 @@ defmodule Indexer.Fetcher.UncleBlock do
block_second_degree_relations: %{params: block_second_degree_relations_params},
transactions: %{params: transactions_params, on_conflict: :nothing}
}) do
{:ok, _} -> {:ok, imported} ->
Accounts.drop(imported[:addresses])
retry(errors)
{:error, {:import = step, [%Changeset{} | _] = changesets}} ->

@ -105,19 +105,15 @@ defmodule Indexer.Temporary.BlocksTransactionsMismatch do
end)
unless Enum.empty?(matching_blocks_data) do
hashes = Enum.map(matching_blocks_data, fn {hash, _trans_num} -> hash end) matching_blocks_data
|> Enum.map(fn {hash, _trans_num} -> hash end)
Block |> update_in_order(refetch_needed: false)
|> where([block], block.hash in ^hashes)
|> Repo.update_all(set: [refetch_needed: false])
end
unless Enum.empty?(unmatching_blocks_data) do
hashes = Enum.map(unmatching_blocks_data, fn {hash, _trans_num} -> hash end) unmatching_blocks_data
|> Enum.map(fn {hash, _trans_num} -> hash end)
Block |> update_in_order(refetch_needed: false, consensus: false)
|> where([block], block.hash in ^hashes)
|> Repo.update_all(set: [refetch_needed: false, consensus: false])
end
if Enum.empty?(missing_blocks_data) do
@ -126,4 +122,19 @@ defmodule Indexer.Temporary.BlocksTransactionsMismatch do
{:retry, missing_blocks_data}
end
end
defp update_in_order(hashes, fields_to_set) do
query =
from(block in Block,
where: block.hash in ^hashes,
# Enforce Block ShareLocks order (see docs: sharelocks.md)
order_by: [asc: block.hash],
lock: "FOR UPDATE"
)
Repo.update_all(
from(b in Block, join: s in subquery(query), on: b.hash == s.hash),
set: fields_to_set
)
end
end

@ -84,18 +84,6 @@ defmodule Indexer.Block.Fetcher.ReceiptsTest do
"transactionIndex" => "0x0", "transactionIndex" => "0x0",
"transactionLogIndex" => "0x0", "transactionLogIndex" => "0x0",
"type" => "mined" "type" => "mined"
},
%{
"address" => "0x8bf38d4764929064f2d4d3a56520a76ab3df415c",
"blockHash" => nil,
"blockNumber" => nil,
"data" => "0x000000000000000000000000862d67cb0773ee3f8ce7ea89b328ffea861ab3ef",
"logIndex" => "0x1",
"topics" => ["0x600bcf04a13e752d1e3670a5a9f1c21177ca2a93c6f5391d4f1298d098097c22"],
"transactionHash" => "0x53bd884872de3e488692881baeec262e7b95234d3965248c39fe992fffd433e5",
"transactionIndex" => "0x0",
"transactionLogIndex" => "0x0",
"type" => "pending"
}
],
"logsBloom" =>
@ -158,8 +146,6 @@ defmodule Indexer.Block.Fetcher.ReceiptsTest do
log[:transaction_hash] == "0x43bd884872de3e488692881baeec262e7b95234d3965248c39fe992fffd433e5" &&
log[:block_number] == 46147
end)
refute Enum.find(logs, fn log -> log[:type] == "pending" end)
end
end
end

@ -56,6 +56,7 @@ $ export NETWORK=POA
| `SUPPORTED_CHAINS` | | Array of supported chains that displays in the footer and in the chains dropdown. This var was introduced in this PR [#1900](https://github.com/poanetwork/blockscout/pull/1900) and looks like an array of JSON objects. | (empty) | v2.0.0+ | | |
| `BLOCK_COUNT_CACHE_PERIOD ` | | time to live of cache in seconds. This var was introduced in [#1876](https://github.com/poanetwork/blockscout/pull/1876) | 600 | v2.0.0+ | | |
| `ALLOWED_EVM_VERSIONS ` | | the comma-separated list of allowed EVM versions for contracts verification. This var was introduced in [#1964](https://github.com/poanetwork/blockscout/pull/1964) | "homestead, tangerineWhistle, spuriousDragon, byzantium, constantinople, petersburg" | v2.0.0+ | | |
| `UNCLES_IN_AVERAGE_BLOCK_TIME` | | Include or exclude nonconsensus blocks in avg block time calculation. Exclude if `false`. | false | v2.0.1+ | | |
| `AVERAGE_BLOCK_CACHE_PERIOD` | | Update of average block cache, in seconds | 30 minutes | v2.0.2+ | | |
| `MARKET_HISTORY_CACHE_PERIOD` | | Update of market history cache, in seconds | 6 hours | v2.0.2+ | | |
| `DISABLE_WEBAPP` | | If `true`, endpoints to webapp are hidden (compile-time) | `false` | v2.0.3+ | :white_check_mark: | |
@ -65,6 +66,6 @@ $ export NETWORK=POA
| `WEBAPP_URL` | | Link to web application instance, e.g. `http://host/path` | (empty) | v2.0.3+ | | |
| `API_URL` | | Link to API instance, e.g. `http://host/path` | (empty) | v2.0.3+ | | |
| `CHAIN_SPEC_PATH` | | Chain specification path (absolute file system path or url) to import block emission reward ranges and genesis account balances from | (empty) | v2.0.4+ | | |
| `COIN_GECKO_ID` | | CoinGecko coin id required for fetching an exchange rate | poa-network | v2.0.4+ | | master |
| `EMISSION_FORMAT` | | Should be set to `POA` if you have block emission identical to POA Network. This env var is used only if `CHAIN_SPEC_PATH` is set | `STANDARD` | v2.0.4+ | | |
| `REWARDS_CONTRACT_ADDRESS` | | Emission rewards contract address. This env var is used only if `EMISSION_FORMAT` is set to `POA` | `0xeca443e8e1ab29971a45a9c57a6a9875701698a5` | v2.0.4+ | | |
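As a usage note for variables added to this table (for instance `COIN_GECKO_ID`), such values are typically surfaced to the application through the Elixir config at startup. The snippet below is only an illustrative sketch: the `:coin_gecko_id` config key is an assumption made for this example, not necessarily the key the project uses.

```elixir
# Illustrative sketch only -- not the project's actual config file.
use Mix.Config

config :explorer,
  coin_gecko_id: System.get_env("COIN_GECKO_ID") || "poa-network"

# Code can then read it at runtime with:
# Application.get_env(:explorer, :coin_gecko_id)
```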

@ -0,0 +1,137 @@
<!--sharelocks.md -->
## ShareLocks
ShareLock is the row-level locking mechanism used internally by PostgreSQL.
### Deadlocks and prevention
When several DB transactions act on multiple rows of the same table, it is
possible to incur a deadlock, and therefore an error.
This can be prevented by enforcing the same consistent order of lock acquisition
on *all* the transactions performing `INSERT`, `UPDATE` or `DELETE` on a given table.
On top of this, when multiple DB transactions act on multiple tables, a deadlock
can still occur, even if each transaction follows the per-table order described
above, if they acquire locks on those tables in different orders.
This can also be prevented by using a consistent order of lock acquisition *between*
different tables.
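To make the failure mode concrete, here is a small illustration (with hypothetical rows) of the circular wait behind such a deadlock, and of the shared ordering rule that removes it:

```elixir
# Illustration only, with hypothetical rows.
#
#   Transaction A            Transaction B
#   locks row 1              locks row 2
#   waits for row 2  <---->  waits for row 1   => circular wait, deadlock
#
# A shared ordering rule (e.g. ascending primary key) makes both transactions
# lock row 1 first, so one simply waits for the other to commit instead.
ordering_rule = fn rows -> Enum.sort_by(rows, & &1.id) end

ordering_rule.([%{id: 2}, %{id: 1}])
#=> [%{id: 1}, %{id: 2}]
```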
### Imposing the lock acquisition order on a table with Ecto
When `INSERT`ing a list of rows, Postgres will respect the order in which they
appear in the query, so the reordering can happen beforehand.
For example, this will work:
```elixir
entries = [...]
ordered_entries = Enum.sort_by(entries, & &1.id)
Repo.insert_all(__MODULE__, ordered_entries)
```
Performing `UPDATE`s is trickier because `UPDATE` statements have no `ORDER BY` clause.
The solution is to `JOIN` on a subquery that `SELECT`s the target rows with `FOR UPDATE`.
Using Ecto this can be done, for example, like this:
```elixir
query =
from(
entry in Entry,
where: not is_nil(entry.value),
order_by: entry.id,
lock: "FOR UPDATE"
)
Repo.update_all(
from(e in Entry, join: s in subquery(query), on: e.id == s.id),
[set: [value: nil]],
timeout: timeout)
```
`DELETE` has the same quirks as `UPDATE`, and it too is solved in the same way.
For example:
```elixir
query =
from(
entry in Entry,
where: is_nil(entry.value),
order_by: entry.id,
lock: "FOR UPDATE"
)
Repo.delete_all(from(e in Entry, join: s in subquery(query), on: e.id == s.id))
```
### Imposing the lock acquisition order between tables with Ecto
When using an `Ecto.Multi` to perform `INSERT`, `UPDATE` or `DELETE` on multiple
tables, the order to keep is between the different operations.
For example, supposing `EntryA` was established to be modified before `EntryB`,
this is not correct:
```elixir
Multi.new()
|> Multi.run(:update_b, fn repo, _ ->
# operations with ordered locks on `EntryB`
end)
|> Multi.run(:update_a, fn repo, _ ->
# operations with ordered locks on `EntryA`
end)
|> Repo.transaction()
```
When possible, the simple solution is to move `:update_a` to be before `:update_b`.
When not possible, for instance if `:update_a` depends on the result of `:update_b`,
this can be solved by acquiring the locks in a separate operation.
For example:
```elixir
Multi.new()
|> Multi.run(:acquire_a, fn repo, _ ->
# acquire locks in order on `EntryA`
end)
|> Multi.run(:update_b, fn repo, _ ->
# operations with ordered locks on `EntryB`
end)
|> Multi.run(:update_a, fn repo, %{acquire_a: values} ->
# operations (no need to enforce order again) on `EntryA`
end)
|> Repo.transaction()
```
Note also that, for the same reasons, it is not safe to perform multiple operations
on the same table within the same transaction, even if each of them acquires its
locks in order, because locks are not released until the transaction is committed.
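As an illustration of that pitfall (reusing the hypothetical `Entry` schema from the examples above), the following transaction is still unsafe even though each step orders its own locks, because the locks taken in `:update_one` are held until commit while `:update_two` starts a second, independent acquisition on the same table:

```elixir
Multi.new()
|> Multi.run(:update_one, fn _repo, _changes ->
  # ordered locks on a first batch of `Entry` rows
  {:ok, :first_batch_locked}
end)
|> Multi.run(:update_two, fn _repo, _changes ->
  # ordered locks on a second batch of `Entry` rows,
  # acquired while the first batch is still held
  {:ok, :second_batch_locked}
end)
|> Repo.transaction()
```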
### Order used for Explorer's tables
This is a complete list of the ordering currently in use on each table.
It also specifies the order between tables in the same transaction: locks for a
table on top need to be acquired before those from a table on the bottom.
Note that this should always be enforced because as long as there is one DB
transaction performing in a different order there is the possibility of a deadlock.
| schema module | table name | ordered by |
|---------------|------------|------------|
| Explorer.Chain.Address | addresses | asc: :hash |
| Explorer.Chain.Address.Name | address_names | [asc: :address_hash, asc: :name] |
| Explorer.Chain.Address.CoinBalance | address_coin_balances | [asc: :address_hash, asc: :block_number] |
| Explorer.Chain.Block | blocks | asc: :hash |
| Explorer.Chain.Block.SecondDegreeRelation | block_second_degree_relations | [asc: :nephew_hash, asc: :uncle_hash] |
| Explorer.Chain.Block.Reward | block_rewards | [asc: :address_hash, asc: :address_type, asc: :block_hash] |
| Explorer.Chain.Block.EmissionReward | emission_rewards | asc: :block_range |
| Explorer.Chain.Transaction | transactions | asc: :hash |
| Explorer.Chain.Transaction.Fork | transaction_forks | [asc: :uncle_hash, asc: :index] |
| Explorer.Chain.Log | logs | [asc: :transaction_hash, asc: :index] |
| Explorer.Chain.InternalTransaction | internal_transactions | [asc: :transaction_hash, asc: :index] |
| Explorer.Chain.Token | tokens | asc: :contract_address_hash |
| Explorer.Chain.TokenTransfer | token_transfers | [asc: :transaction_hash, asc: :log_index]|
| Explorer.Chain.Address.TokenBalance | address_token_balances | [asc: :address_hash, asc: :token_contract_address_hash, asc: :block_number] |
| Explorer.Chain.Address.CurrentTokenBalance | address_current_token_balances | [asc: :address_hash, asc: :token_contract_address_hash] |
| Explorer.Chain.StakingPool | staking_pools | :staking_address_hash |
| Explorer.Chain.StakingPoolsDelegator | staking_pools_delegators | [asc: :delegator_address_hash, asc: :pool_address_hash] |
| Explorer.Chain.ContractMethod | contract_methods | [asc: :identifier, asc: :abi] |
| Explorer.Market.MarketHistory | market_history | asc: :date |
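As a concrete instance of the first row of the table (illustrative only; `changes_list` is a hypothetical list of address attribute maps), inserts into `addresses` sort by ascending `hash` before acquiring the row locks, following the same pattern shown for `Entry` above:

```elixir
ordered_changes = Enum.sort_by(changes_list, & &1.hash)

Repo.insert_all(Explorer.Chain.Address, ordered_changes)
```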

@ -39,7 +39,7 @@
"ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.6.4", "5b1ac8451f889576bb29dee70412de1170974298727ab944aa4d17e91bdd3472", [:mix], [{:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.6", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.3", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.6.4", "5b1ac8451f889576bb29dee70412de1170974298727ab944aa4d17e91bdd3472", [:mix], [{:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.6", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.3", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"ex_cldr_units": {:hex, :ex_cldr_units, "2.5.1", "0e65067a22a7c5146266c313d6333c2700868c32aa6d536f47c6c0d84aac3ac1", [:mix], [{:ex_cldr, "~> 2.6", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.2", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.6", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, "ex_cldr_units": {:hex, :ex_cldr_units, "2.5.1", "0e65067a22a7c5146266c313d6333c2700868c32aa6d536f47c6c0d84aac3ac1", [:mix], [{:ex_cldr, "~> 2.6", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.2", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.6", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"ex_doc": {:hex, :ex_doc, "0.19.3", "3c7b0f02851f5fc13b040e8e925051452e41248f685e40250d7e40b07b9f8c10", [:mix], [{:earmark, "~> 1.2", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, "ex_doc": {:hex, :ex_doc, "0.19.3", "3c7b0f02851f5fc13b040e8e925051452e41248f685e40250d7e40b07b9f8c10", [:mix], [{:earmark, "~> 1.2", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
"ex_json_schema": {:hex, :ex_json_schema, "0.6.1", "b57c0588385b8262b80f19d33d9b9b71fcd60d247691abf2635b57a03ec0ad44", [:mix], [], "hexpm"}, "ex_json_schema": {:hex, :ex_json_schema, "0.6.2", "de23d80478215987469c81688208fe0ff440ee0e0e6ae2268fcadbb2ff35df9d", [:mix], [], "hexpm"},
"ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"}, "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
"ex_rlp": {:hex, :ex_rlp, "0.5.2", "7f4ce7bd55e543c054ce6d49629b01e9833c3462e3d547952be89865f39f2c58", [:mix], [], "hexpm"}, "ex_rlp": {:hex, :ex_rlp, "0.5.2", "7f4ce7bd55e543c054ce6d49629b01e9833c3462e3d547952be89865f39f2c58", [:mix], [], "hexpm"},
"ex_utils": {:hex, :ex_utils, "0.1.7", "2c133e0bcdc49a858cf8dacf893308ebc05bc5fba501dc3d2935e65365ec0bf3", [:mix], [], "hexpm"}, "ex_utils": {:hex, :ex_utils, "0.1.7", "2c133e0bcdc49a858cf8dacf893308ebc05bc5fba501dc3d2935e65365ec0bf3", [:mix], [], "hexpm"},
