Merge pull request #4830 from blockscout/vb-speedup-tx-per-day-chart-data-collection

Speed up txs per day chart data collection
pull/4840/head
Victor Baranov authored 3 years ago; committed by GitHub
commit ec3fdcd9a2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
Files changed (numbers are changed-line counts):
  1. CHANGELOG.md (1)
  2. apps/block_scout_web/assets/package-lock.json (19)
  3. apps/explorer/lib/explorer/chain/transaction/history/historian.ex (124)
  4. apps/explorer/lib/explorer/history/process.ex (2)
  5. apps/explorer/test/explorer/history/process_test.exs (6)

@@ -24,6 +24,7 @@
### Fixes
- [#4835](https://github.com/blockscout/blockscout/pull/4835) - Fix view for broken token icons
- [#4830](https://github.com/blockscout/blockscout/pull/4830) - Speed up txs per day chart data collection
- [#4818](https://github.com/blockscout/blockscout/pull/4818) - Fix for extract_omni_bridged_token_metadata_wrapper method
- [#4812](https://github.com/blockscout/blockscout/pull/4812), [#4815](https://github.com/blockscout/blockscout/pull/4815) - Check if exists custom_cap property of extended token object before access it
- [#4810](https://github.com/blockscout/blockscout/pull/4810) - Show `nil` block.size as `N/A bytes`

@@ -11,7 +11,7 @@
"@tarekraafat/autocomplete.js": "^10.2.6", "@tarekraafat/autocomplete.js": "^10.2.6",
"assert": "^2.0.0", "assert": "^2.0.0",
"bignumber.js": "^9.0.0", "bignumber.js": "^9.0.0",
"bootstrap": "^4.3.1", "bootstrap": "^4.6.0",
"chart.js": "^3.5.1", "chart.js": "^3.5.1",
"chartjs-adapter-moment": "^1.0.0", "chartjs-adapter-moment": "^1.0.0",
"clipboard": "^2.0.4", "clipboard": "^2.0.4",
@@ -4168,19 +4168,16 @@
"dev": true "dev": true
}, },
"node_modules/bootstrap": { "node_modules/bootstrap": {
"version": "4.4.1", "version": "4.6.1",
"resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-4.4.1.tgz", "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-4.6.1.tgz",
"integrity": "sha512-tbx5cHubwE6e2ZG7nqM3g/FZ5PQEDMWmMGNrCUBVRPHXTJaH7CBDdsLeu3eCh3B1tzAxTnAbtmrzvWEvT2NNEA==", "integrity": "sha512-0dj+VgI9Ecom+rvvpNZ4MUZJz8dcX7WCX+eTID9+/8HgOkv3dsRzi8BGeZJCQU6flWQVYxwTQnEZFrmJSEO7og==",
"engines": {
"node": ">=6"
},
"funding": { "funding": {
"type": "opencollective", "type": "opencollective",
"url": "https://opencollective.com/bootstrap" "url": "https://opencollective.com/bootstrap"
}, },
"peerDependencies": { "peerDependencies": {
"jquery": "1.9.1 - 3", "jquery": "1.9.1 - 3",
"popper.js": "^1.16.0" "popper.js": "^1.16.1"
} }
}, },
"node_modules/brace-expansion": { "node_modules/brace-expansion": {
@@ -21366,9 +21363,9 @@
"dev": true "dev": true
}, },
"bootstrap": { "bootstrap": {
"version": "4.4.1", "version": "4.6.1",
"resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-4.4.1.tgz", "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-4.6.1.tgz",
"integrity": "sha512-tbx5cHubwE6e2ZG7nqM3g/FZ5PQEDMWmMGNrCUBVRPHXTJaH7CBDdsLeu3eCh3B1tzAxTnAbtmrzvWEvT2NNEA==", "integrity": "sha512-0dj+VgI9Ecom+rvvpNZ4MUZJz8dcX7WCX+eTID9+/8HgOkv3dsRzi8BGeZJCQU6flWQVYxwTQnEZFrmJSEO7og==",
"requires": {} "requires": {}
}, },
"brace-expansion": { "brace-expansion": {

@@ -5,11 +5,11 @@ defmodule Explorer.Chain.Transaction.History.Historian do
require Logger require Logger
use Explorer.History.Historian use Explorer.History.Historian
alias Explorer.{Chain, Repo}
alias Explorer.Chain.{Block, Transaction} alias Explorer.Chain.{Block, Transaction}
alias Explorer.Chain.Events.Publisher alias Explorer.Chain.Events.Publisher
alias Explorer.Chain.Transaction.History.TransactionStats alias Explorer.Chain.Transaction.History.TransactionStats
alias Explorer.History.Process, as: HistoryProcess alias Explorer.History.Process, as: HistoryProcess
alias Explorer.Repo
import Ecto.Query, only: [from: 2, subquery: 1] import Ecto.Query, only: [from: 2, subquery: 1]
@@ -19,8 +19,10 @@ defmodule Explorer.Chain.Transaction.History.Historian do
def compile_records(num_days, records \\ []) do def compile_records(num_days, records \\ []) do
Logger.info("tx/per day chart: collect records for txs per day stats") Logger.info("tx/per day chart: collect records for txs per day stats")
if num_days == 0 do if num_days == 1 do
Logger.info("tx/per day chart: records collected #{inspect(records)}") Logger.info("tx/per day chart: records collected #{inspect(records)}")
records = [%{date: date_today(), number_of_transactions: 0, gas_used: 0, total_fee: 0} | records]
# base case # base case
{:ok, records} {:ok, records}
else else
@@ -33,60 +35,92 @@ defmodule Explorer.Chain.Transaction.History.Historian do
Logger.info("tx/per day chart: latest date #{DateTime.to_string(latest)}") Logger.info("tx/per day chart: latest date #{DateTime.to_string(latest)}")
min_max_block_query = with {:ok, min_block} <- Chain.timestamp_to_block_number(earliest, :after),
from(block in Block, {:ok, max_block} <- Chain.timestamp_to_block_number(latest, :after) do
where: block.timestamp >= ^earliest and block.timestamp <= ^latest, record =
select: {min(block.number), max(block.number)} min_block
) |> compile_records_in_range(max_block)
|> Map.put(:date, day_to_fetch)
{min_block, max_block} = Repo.one(min_max_block_query, timeout: :infinity)
Logger.info("tx/per day chart: min/max block numbers [#{min_block}, #{max_block}]")
if min_block && max_block do
all_transactions_query =
from(
transaction in Transaction,
where: transaction.block_number >= ^min_block and transaction.block_number <= ^max_block
)
query =
from(transaction in subquery(all_transactions_query),
join: block in Block,
on: transaction.block_hash == block.hash,
where: block.consensus == true,
select: transaction
)
num_transactions = Repo.aggregate(query, :count, :hash, timeout: :infinity)
Logger.info("tx/per day chart: num of transactions #{num_transactions}")
gas_used = Repo.aggregate(query, :sum, :gas_used, timeout: :infinity)
Logger.info("tx/per day chart: total gas used #{gas_used}")
total_fee_query =
from(transaction in subquery(all_transactions_query),
join: block in Block,
on: transaction.block_hash == block.hash,
where: block.consensus == true,
select: fragment("SUM(? * ?)", transaction.gas_price, transaction.gas_used)
)
total_fee = Repo.one(total_fee_query, timeout: :infinity)
Logger.info("tx/per day chart: total fee #{total_fee}")
records = [ records = [
%{date: day_to_fetch, number_of_transactions: num_transactions, gas_used: gas_used, total_fee: total_fee} record
| records | records
] ]
compile_records(num_days - 1, records) compile_records(num_days - 1, records)
else else
records = [%{date: day_to_fetch, number_of_transactions: 0, gas_used: 0, total_fee: 0} | records] _ ->
compile_records(num_days - 1, records) min_max_block_query =
from(block in Block,
where: block.timestamp >= ^earliest and block.timestamp <= ^latest,
select: {min(block.number), max(block.number)}
)
{min_block, max_block} = Repo.one(min_max_block_query, timeout: :infinity)
if min_block && max_block do
record =
min_block
|> compile_records_in_range(max_block)
|> Map.put(:date, day_to_fetch)
records = [
record
| records
]
compile_records(num_days - 1, records)
else
records = [%{date: day_to_fetch, number_of_transactions: 0, gas_used: 0, total_fee: 0} | records]
compile_records(num_days - 1, records)
end
end end
end end
end end
defp compile_records_in_range(min_block, max_block) do
Logger.info("tx/per day chart: min/max block numbers [#{min_block}, #{max_block}]")
all_transactions_query =
from(
transaction in Transaction,
where: transaction.block_number >= ^min_block and transaction.block_number <= ^max_block
)
all_blocks_query =
from(
block in Block,
where: block.consensus == true,
where: block.number >= ^min_block and block.number <= ^max_block,
select: block.number
)
query =
from(transaction in subquery(all_transactions_query),
join: block in subquery(all_blocks_query),
on: transaction.block_number == block.number,
select: transaction
)
num_transactions = Repo.aggregate(query, :count, :hash, timeout: :infinity)
Logger.info("tx/per day chart: num of transactions #{num_transactions}")
gas_used = Repo.aggregate(query, :sum, :gas_used, timeout: :infinity)
Logger.info("tx/per day chart: total gas used #{gas_used}")
total_fee_query =
from(transaction in subquery(all_transactions_query),
join: block in Block,
on: transaction.block_hash == block.hash,
where: block.consensus == true,
select: fragment("SUM(? * ?)", transaction.gas_price, transaction.gas_used)
)
total_fee = Repo.one(total_fee_query, timeout: :infinity)
Logger.info("tx/per day chart: total fee #{total_fee}")
%{number_of_transactions: num_transactions, gas_used: gas_used, total_fee: total_fee}
end
@impl Historian @impl Historian
def save_records(records) do def save_records(records) do
Logger.info("tx/per day chart: save records") Logger.info("tx/per day chart: save records")

@@ -60,7 +60,7 @@ defmodule Explorer.History.Process do
defp schedule_next_compilation do defp schedule_next_compilation do
delay = config_or_default(:history_fetch_interval, :timer.minutes(60)) delay = config_or_default(:history_fetch_interval, :timer.minutes(60))
Process.send_after(self(), {:compile_historical_records, 1}, delay) Process.send_after(self(), {:compile_historical_records, 2}, delay)
end end
@spec failed_compilation(non_neg_integer(), module(), non_neg_integer()) :: any() @spec failed_compilation(non_neg_integer(), module(), non_neg_integer()) :: any()

@@ -52,7 +52,7 @@ defmodule Explorer.History.ProcessTest do
record = %{date: ~D[2018-04-01], closing_price: Decimal.new(10), opening_price: Decimal.new(5)} record = %{date: ~D[2018-04-01], closing_price: Decimal.new(10), opening_price: Decimal.new(5)}
TestHistorian TestHistorian
|> expect(:compile_records, fn 1 -> {:ok, [record]} end) |> expect(:compile_records, fn 2 -> {:ok, [record]} end)
|> expect(:save_records, fn _ -> :ok end) |> expect(:save_records, fn _ -> :ok end)
state = %{historian: TestHistorian} state = %{historian: TestHistorian}
@@ -66,10 +66,10 @@ defmodule Explorer.History.ProcessTest do
assert {:noreply, state} == HistoryProcess.handle_info({nil, {1, 0, {:ok, [record]}}}, state) assert {:noreply, state} == HistoryProcess.handle_info({nil, {1, 0, {:ok, [record]}}}, state)
# Message isn't sent before interval is up # Message isn't sent before interval is up
refute_receive {:compile_historical_records, 1}, history_fetch_interval - 1 refute_receive {:compile_historical_records, 2}, history_fetch_interval - 1
# Now message is sent # Now message is sent
assert_receive {:compile_historical_records, 1} assert_receive {:compile_historical_records, 2}
end end
test "handle_info with failed task" do test "handle_info with failed task" do

Loading…
Cancel
Save