* master: (68 commits) fix CHANGELOG Update overview.html.eex Update CHANGELOG.md ui issues Update CHANGELOG.md blockscout theme new logo Add files via upload new logo for blockscout theme Update CHANGELOG.md blockscout theme (update) changelog (update) internationalization files (fix) commented out %> tag was showing (fix) missing checkmarks update CHANGELOG with PR 2070 Reduce max_concurrency of BlocksTransactionsMismatch Add CHANGELOG entry and add 1.3.15 changelog Docsify setup Add docs folder for docsify integration stop click twice ...

# Conflicts:
#	CHANGELOG.md
#	apps/block_scout_web/assets/css/app.scss
#	apps/block_scout_web/priv/gettext/default.pot
#	apps/block_scout_web/priv/gettext/en/LC_MESSAGES/default.po

pull/2071/head
commit 085861fabb
@@ -0,0 +1,64 @@
.logs-topbar {
  padding-bottom: 30px;
  @media (min-width: 600px) {
    display: flex;
    justify-content: space-between;
  }
  .pagination-container.position-top {
    padding-top: 0 !important;
  }
}

.logs-search {
  display: flex;
  position: relative;
  @media (max-width: 599px) {
    margin-bottom: 30px;
  }
}

.logs-search-input, .logs-search-btn, .logs-search-btn-cancel {
  height: 24px;
  background-color: #f5f6fa;
  border: 1px solid #f5f6fa;
  color: #333;
  border-radius: 2px;
  outline: none;
  font-family: Nunito, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
  font-size: 12px;
  font-weight: 600;
}

.logs-search-input {
  padding-left: 6px;
  display: inline-flex;
  flex-grow: 2;
  min-width: 160px;
  &::placeholder {
    color: #a3a9b5;
  }
}

.logs-search-btn {
  margin-left: 6px;
  color: #a3a9b5;
  transition: .1s ease-in;
  cursor: pointer;
  &:hover {
    background-color: $primary;
    color: #fff;
    border-color: $primary;
  }
}

.logs-search-btn-cancel {
  color: #a3a9b5;
  cursor: pointer;
  transition: .1s ease-in;
  position: absolute;
  top: 0;
  left: 136px;
  &:hover {
    color: #333;
  }
}
@@ -1,8 +1,71 @@
$primary: #262d62;
$secondary: #687bf6;
$tertiary: #687bf6;
// $primary: #262d62;
// $secondary: #687bf6;
// $tertiary: #687bf6;

$dashboard-line-color-price: #8286a9 !default;

$base-border-color: #e2e5ec !default;
$common-container-margin: 50px !default;

// general
$primary: #5c34a2;
$secondary: #87e1a9;
$tertiary: #bf9cff;
$additional-font: #fff;

// footer
$footer-background-color: #3c226a;
$footer-title-color: #fff;
$footer-text-color: #bda6e7;
$footer-item-disc-color: $secondary;
.footer-logo { filter: brightness(0) invert(1); }

// dashboard
$dashboard-line-color-price: $tertiary; // price left border

$dashboard-banner-chart-legend-value-color: $additional-font; // chart labels

$dashboard-stats-item-value-color: $additional-font; // stat values

$dashboard-stats-item-border-color: $secondary; // stat border

$dashboard-banner-gradient-start: $primary; // gradient begin

$dashboard-banner-gradient-end: lighten($primary, 5); // gradient end

$dashboard-banner-network-plain-container-background-color: #865bd4; // stats bg


// navigation
.navbar { box-shadow: 0px 0px 30px 0px rgba(21, 53, 80, 0.12); } // header shadow
$header-icon-border-color-hover: $primary; // top border on hover
$header-icon-color-hover: $primary; // nav icon on hover
.dropdown-item:hover, .dropdown-item:focus { background-color: $primary !important; } // dropdown item on hover

// buttons
$btn-line-bg: #fff; // button bg
$btn-line-color: $primary; // button border and font color && hover bg color
$btn-copy-color: $primary; // btn copy
$btn-qr-color: $primary; // btn qr-code

// links & tile
.tile a { color: $primary !important; } // links color for badges
.tile-type-block {
  border-left: 4px solid $primary;
} // tab active bg

// card
$card-background-1: $primary;
$card-tab-active: $primary;

.footer {
  .tooltip {
    .tooltip-inner {
      background-color: darken($footer-background-color, 10) !important;
    }
    .arrow::before {
      border-top-color: darken($footer-background-color, 10) !important;
      border-bottom-color: darken($footer-background-color, 10) !important;
    }
  }
}
@@ -0,0 +1,75 @@
import $ from 'jquery'
import _ from 'lodash'
import humps from 'humps'
import { connectElements } from '../../lib/redux_helpers.js'
import { createAsyncLoadStore } from '../../lib/async_listing_load'

export const initialState = {
  addressHash: null,
  isSearch: false
}

export function reducer (state, action) {
  switch (action.type) {
    case 'PAGE_LOAD':
    case 'ELEMENTS_LOAD': {
      return Object.assign({}, state, _.omit(action, 'type'))
    }
    case 'START_SEARCH': {
      // reset pagination and flag that a topic search is in progress
      return Object.assign({}, state, {pagesStack: [], isSearch: true})
    }
    default:
      return state
  }
}

const elements = {
  '[data-search-field]': {
    render ($el, state) {
      return $el
    }
  },
  '[data-search-button]': {
    render ($el, state) {
      return $el
    }
  },
  '[data-cancel-search-button]': {
    render ($el, state) {
      // the cancel button is only shown while a search is active
      if (!state.isSearch) {
        return $el.hide()
      }

      return $el.show()
    }
  }
}

if ($('[data-page="address-logs"]').length) {
  const store = createAsyncLoadStore(reducer, initialState, 'dataset.identifierLog')
  const addressHash = $('[data-page="address-details"]')[0].dataset.pageAddressHash
  const $element = $('[data-async-listing]')

  connectElements({ store, elements })

  store.dispatch({
    type: 'PAGE_LOAD',
    addressHash: addressHash})

  $element.on('click', '[data-search-button]', (event) => {
    store.dispatch({
      type: 'START_SEARCH',
      addressHash: addressHash})
    // fetch logs filtered by the entered topic for the current address
    var topic = $('[data-search-field]').val()
    var path = '/search_logs?topic=' + topic + '&address_id=' + store.getState().addressHash
    store.dispatch({type: 'START_REQUEST'})
    $.getJSON(path, {type: 'JSON'})
      .done(response => store.dispatch(Object.assign({type: 'ITEMS_FETCHED'}, humps.camelizeKeys(response))))
      .fail(() => store.dispatch({type: 'REQUEST_ERROR'}))
      .always(() => store.dispatch({type: 'FINISH_REQUEST'}))
  })

  $element.on('click', '[data-cancel-search-button]', (event) => {
    // leaving search mode: reload the page without the search query string
    window.location.replace(window.location.href.split('?')[0])
  })
}
(Binary image file updated: 4.1 KiB → 12 KiB)
@@ -0,0 +1,91 @@
defmodule Explorer.Chain.Import.Runner.StakingPoolsDelegators do
  @moduledoc """
  Bulk imports delegators into the `StakingPoolsDelegator` table.
  """

  require Ecto.Query

  alias Ecto.{Changeset, Multi, Repo}
  alias Explorer.Chain.{Import, StakingPoolsDelegator}

  import Ecto.Query, only: [from: 2]

  @behaviour Import.Runner

  # milliseconds
  @timeout 60_000

  @type imported :: [StakingPoolsDelegator.t()]

  @impl Import.Runner
  def ecto_schema_module, do: StakingPoolsDelegator

  @impl Import.Runner
  def option_key, do: :staking_pools_delegators

  @impl Import.Runner
  def imported_table_row do
    %{
      value_type: "[#{ecto_schema_module()}.t()]",
      value_description: "List of `t:#{ecto_schema_module()}.t/0`s"
    }
  end

  @impl Import.Runner
  def run(multi, changes_list, %{timestamps: timestamps} = options) do
    insert_options =
      options
      |> Map.get(option_key(), %{})
      |> Map.take(~w(on_conflict timeout)a)
      |> Map.put_new(:timeout, @timeout)
      |> Map.put(:timestamps, timestamps)

    multi
    |> Multi.run(:insert_staking_pools_delegators, fn repo, _ ->
      insert(repo, changes_list, insert_options)
    end)
  end

  @impl Import.Runner
  def timeout, do: @timeout

  @spec insert(Repo.t(), [map()], %{
          optional(:on_conflict) => Import.Runner.on_conflict(),
          required(:timeout) => timeout,
          required(:timestamps) => Import.timestamps()
        }) ::
          {:ok, [StakingPoolsDelegator.t()]}
          | {:error, [Changeset.t()]}
  defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
    on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)

    {:ok, _} =
      Import.insert_changes_list(
        repo,
        changes_list,
        conflict_target: [:pool_address_hash, :delegator_address_hash],
        on_conflict: on_conflict,
        for: StakingPoolsDelegator,
        returning: [:pool_address_hash, :delegator_address_hash],
        timeout: timeout,
        timestamps: timestamps
      )
  end

  defp default_on_conflict do
    from(
      delegator in StakingPoolsDelegator,
      update: [
        set: [
          stake_amount: fragment("EXCLUDED.stake_amount"),
          ordered_withdraw: fragment("EXCLUDED.ordered_withdraw"),
          max_withdraw_allowed: fragment("EXCLUDED.max_withdraw_allowed"),
          max_ordered_withdraw_allowed: fragment("EXCLUDED.max_ordered_withdraw_allowed"),
          ordered_withdraw_epoch: fragment("EXCLUDED.ordered_withdraw_epoch"),
          inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", delegator.inserted_at),
          updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", delegator.updated_at)
        ]
      ]
    )
  end
end
@@ -0,0 +1,97 @@
defmodule Explorer.Chain.StakingPool do
  @moduledoc """
  The representation of a staking pool from the POSDAO network.
  A staking pool can be either a candidate or a validator.
  """
  use Ecto.Schema
  import Ecto.Changeset

  alias Explorer.Chain.{
    Address,
    Hash,
    StakingPoolsDelegator,
    Wei
  }

  @type t :: %__MODULE__{
          staking_address_hash: Hash.Address.t(),
          mining_address_hash: Hash.Address.t(),
          banned_until: integer,
          delegators_count: integer,
          is_active: boolean,
          is_banned: boolean,
          is_validator: boolean,
          likelihood: integer,
          staked_ratio: Decimal.t(),
          self_staked_amount: Wei.t(),
          staked_amount: Wei.t(),
          was_banned_count: integer,
          was_validator_count: integer,
          is_deleted: boolean
        }

  @attrs ~w(
    is_active delegators_count staked_amount self_staked_amount is_validator
    was_validator_count is_banned was_banned_count banned_until likelihood
    staked_ratio staking_address_hash mining_address_hash
  )a
  @req_attrs ~w(
    is_active delegators_count staked_amount self_staked_amount is_validator
    was_validator_count is_banned was_banned_count banned_until
    staking_address_hash mining_address_hash
  )a

  schema "staking_pools" do
    field(:banned_until, :integer)
    field(:delegators_count, :integer)
    field(:is_active, :boolean, default: false)
    field(:is_banned, :boolean, default: false)
    field(:is_validator, :boolean, default: false)
    field(:likelihood, :decimal)
    field(:staked_ratio, :decimal)
    field(:self_staked_amount, Wei)
    field(:staked_amount, Wei)
    field(:was_banned_count, :integer)
    field(:was_validator_count, :integer)
    field(:is_deleted, :boolean, default: false)
    has_many(:delegators, StakingPoolsDelegator, foreign_key: :pool_address_hash)

    belongs_to(
      :staking_address,
      Address,
      foreign_key: :staking_address_hash,
      references: :hash,
      type: Hash.Address
    )

    belongs_to(
      :mining_address,
      Address,
      foreign_key: :mining_address_hash,
      references: :hash,
      type: Hash.Address
    )

    timestamps(null: false, type: :utc_datetime_usec)
  end

  @doc false
  def changeset(staking_pool, attrs) do
    staking_pool
    |> cast(attrs, @attrs)
    |> cast_assoc(:delegators)
    |> validate_required(@req_attrs)
    |> validate_staked_amount()
    |> unique_constraint(:staking_address_hash)
  end

  defp validate_staked_amount(%{valid?: false} = c), do: c

  defp validate_staked_amount(changeset) do
    if get_field(changeset, :staked_amount) < get_field(changeset, :self_staked_amount) do
      add_error(changeset, :staked_amount, "must be greater than self_staked_amount")
    else
      changeset
    end
  end
end
@@ -0,0 +1,64 @@
defmodule Explorer.Chain.StakingPoolsDelegator do
  @moduledoc """
  The representation of a delegator from the POSDAO network.
  Delegators stake into staking pools and can withdraw from them.
  """
  use Ecto.Schema
  import Ecto.Changeset

  alias Explorer.Chain.{
    Address,
    Hash,
    StakingPool,
    Wei
  }

  @type t :: %__MODULE__{
          pool_address_hash: Hash.Address.t(),
          delegator_address_hash: Hash.Address.t(),
          max_ordered_withdraw_allowed: Wei.t(),
          max_withdraw_allowed: Wei.t(),
          ordered_withdraw: Wei.t(),
          stake_amount: Wei.t(),
          ordered_withdraw_epoch: integer()
        }

  @attrs ~w(
    pool_address_hash delegator_address_hash max_ordered_withdraw_allowed
    max_withdraw_allowed ordered_withdraw stake_amount ordered_withdraw_epoch
  )a

  schema "staking_pools_delegators" do
    field(:max_ordered_withdraw_allowed, Wei)
    field(:max_withdraw_allowed, Wei)
    field(:ordered_withdraw, Wei)
    field(:ordered_withdraw_epoch, :integer)
    field(:stake_amount, Wei)

    belongs_to(
      :staking_pool,
      StakingPool,
      foreign_key: :pool_address_hash,
      references: :staking_address_hash,
      type: Hash.Address
    )

    belongs_to(
      :delegator_address,
      Address,
      foreign_key: :delegator_address_hash,
      references: :hash,
      type: Hash.Address
    )

    timestamps(null: false, type: :utc_datetime_usec)
  end

  @doc false
  def changeset(staking_pools_delegator, attrs) do
    staking_pools_delegator
    |> cast(attrs, @attrs)
    |> validate_required(@attrs)
    |> unique_constraint(:pool_address_hash, name: :pools_delegator_index)
  end
end
@@ -0,0 +1,27 @@
defmodule Explorer.Repo.Migrations.CreateStakingPools do
  use Ecto.Migration

  def change do
    create table(:staking_pools) do
      add(:is_active, :boolean, default: false, null: false)
      add(:is_deleted, :boolean, default: false, null: false)
      add(:delegators_count, :integer)
      add(:staked_amount, :numeric, precision: 100)
      add(:self_staked_amount, :numeric, precision: 100)
      add(:is_validator, :boolean, default: false, null: false)
      add(:was_validator_count, :integer)
      add(:is_banned, :boolean, default: false, null: false)
      add(:was_banned_count, :integer)
      add(:banned_until, :bigint)
      add(:likelihood, :decimal, precision: 5, scale: 2)
      add(:staked_ratio, :decimal, precision: 5, scale: 2)
      add(:staking_address_hash, :bytea)
      add(:mining_address_hash, :bytea)

      timestamps(null: false, type: :utc_datetime_usec)
    end

    create(index(:staking_pools, [:staking_address_hash], unique: true))
    create(index(:staking_pools, [:mining_address_hash]))
  end
end
@@ -0,0 +1,26 @@
defmodule Explorer.Repo.Migrations.CreateStakingPoolsDelegator do
  use Ecto.Migration

  def change do
    create table(:staking_pools_delegators) do
      add(:delegator_address_hash, :bytea)
      add(:pool_address_hash, :bytea)
      add(:stake_amount, :numeric, precision: 100)
      add(:ordered_withdraw, :numeric, precision: 100)
      add(:max_withdraw_allowed, :numeric, precision: 100)
      add(:max_ordered_withdraw_allowed, :numeric, precision: 100)
      add(:ordered_withdraw_epoch, :integer)

      timestamps(null: false, type: :utc_datetime_usec)
    end

    create(index(:staking_pools_delegators, [:delegator_address_hash]))

    create(
      index(:staking_pools_delegators, [:delegator_address_hash, :pool_address_hash],
        unique: true,
        name: :pools_delegator_index
      )
    )
  end
end
@@ -0,0 +1,32 @@
defmodule Explorer.Chain.Import.Runner.StakingPoolsDelegatorsTest do
  use Explorer.DataCase

  import Explorer.Factory

  alias Ecto.Multi
  alias Explorer.Chain.Import.Runner.StakingPoolsDelegators
  alias Explorer.Chain.StakingPoolsDelegator

  describe "run/1" do
    test "insert new pools list" do
      delegators =
        [params_for(:staking_pools_delegator), params_for(:staking_pools_delegator)]
        |> Enum.map(fn param ->
          changeset = StakingPoolsDelegator.changeset(%StakingPoolsDelegator{}, param)
          changeset.changes
        end)

      assert {:ok, %{insert_staking_pools_delegators: list}} = run_changes(delegators)
      assert Enum.count(list) == Enum.count(delegators)
    end
  end

  defp run_changes(changes) do
    Multi.new()
    |> StakingPoolsDelegators.run(changes, %{
      timeout: :infinity,
      timestamps: %{inserted_at: DateTime.utc_now(), updated_at: DateTime.utc_now()}
    })
    |> Repo.transaction()
  end
end
@@ -0,0 +1,18 @@
defmodule Explorer.Chain.StakingPoolTest do
  use Explorer.DataCase

  alias Explorer.Chain.StakingPool

  describe "changeset/2" do
    test "with valid attributes" do
      params = params_for(:staking_pool)
      changeset = StakingPool.changeset(%StakingPool{}, params)
      assert changeset.valid?
    end

    test "with invalid attributes" do
      changeset = StakingPool.changeset(%StakingPool{}, %{staking_address_hash: 0})
      refute changeset.valid?
    end
  end
end
@@ -0,0 +1,18 @@
defmodule Explorer.Chain.StakingPoolsDelegatorTest do
  use Explorer.DataCase

  alias Explorer.Chain.StakingPoolsDelegator

  describe "changeset/2" do
    test "with valid attributes" do
      params = params_for(:staking_pools_delegator)
      changeset = StakingPoolsDelegator.changeset(%StakingPoolsDelegator{}, params)
      assert changeset.valid?
    end

    test "with invalid attributes" do
      changeset = StakingPoolsDelegator.changeset(%StakingPoolsDelegator{}, %{pool_address_hash: 0})
      refute changeset.valid?
    end
  end
end
@@ -0,0 +1,323 @@
<p align="center">
  <a href="https://blockscout.com">
    <img width="200" src="https://blockscout.com/eth/mainnet/android-chrome-192x192.png" />
  </a>
</p>

<h1 align="center">BlockScout</h1>
<p align="center">Blockchain Explorer for inspecting and analyzing EVM Chains.</p>
<div align="center">

[![CircleCI](https://circleci.com/gh/poanetwork/blockscout.svg?style=svg&circle-token=f8823a3d0090407c11f87028c73015a331dbf604)](https://circleci.com/gh/poanetwork/blockscout) [![Coverage Status](https://coveralls.io/repos/github/poanetwork/blockscout/badge.svg?branch=master)](https://coveralls.io/github/poanetwork/blockscout?branch=master) [![Join the chat at https://gitter.im/poanetwork/blockscout](https://badges.gitter.im/poanetwork/blockscout.svg)](https://gitter.im/poanetwork/blockscout?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

</div>

BlockScout provides a comprehensive, easy-to-use interface for users to view, confirm, and inspect transactions on **all EVM** (Ethereum Virtual Machine) blockchains. This includes the Ethereum main and test networks as well as **Ethereum forks and sidechains**.

Following is an overview of the project and instructions for [getting started](#getting-started).

Visit the [POA BlockScout forum](https://forum.poa.network/c/blockscout) for additional deployment instructions, FAQs, troubleshooting, and other BlockScout-related items. You can also post and answer questions there.

You can also access the dev chatroom on our [Gitter Channel](https://gitter.im/poanetwork/blockscout).

## About BlockScout

BlockScout is an Elixir application that allows users to search transactions, view accounts and balances, and verify smart contracts on the entire Ethereum network, including all forks and sidechains.

Currently available block explorers (e.g. Etherscan and Etherchain) are closed systems which are not independently verifiable. As Ethereum sidechains continue to proliferate in both private and public settings, transparent tools are needed to analyze and validate transactions.


### Features

- [x] **Open source development**: The code is community driven and available for anyone to use, explore and improve.

- [x] **Real time transaction tracking**: Transactions are updated in real time - no page refresh required. Infinite scrolling is also enabled.

- [x] **Smart contract interaction**: Users can read and verify Solidity smart contracts and access pre-existing contracts to fast-track development. Support for Vyper, LLL, and Web Assembly contracts is in progress.

- [x] **Token support**: ERC20 and ERC721 tokens are supported. Future releases will support additional token types including ERC223 and ERC1155.

- [x] **User customization**: Users can easily deploy on a network and customize the Bootstrap interface.

- [x] **Ethereum sidechain networks**: BlockScout supports the Ethereum mainnet, Ethereum testnets, POA network, and forks like Ethereum Classic, xDAI, additional sidechains, and private EVM networks.

### Supported Projects

| **Hosted Mainnets** | **Hosted Testnets** | **Additional Chains using BlockScout** |
|---------------------|---------------------|----------------------------------------|
| [Aerum](https://blockscout.com/aerum/mainnet) | [Goerli Testnet](https://blockscout.com/eth/goerli) | [ARTIS](https://explorer.sigma1.artis.network) |
| [Callisto](https://blockscout.com/callisto/mainnet) | [Kovan Testnet](https://blockscout.com/eth/kovan) | [Ether-1](https://blocks.ether1.wattpool.net/) |
| [Ethereum Classic](https://blockscout.com/etc/mainnet) | [POA Sokol Testnet](https://blockscout.com/poa/sokol) | [Fuse Network](https://explorer.fuse.io/) |
| [Ethereum Mainnet](https://blockscout.com/eth/mainnet) | [Rinkeby Testnet](https://blockscout.com/eth/rinkeby) | [Oasis Labs](https://blockexplorer.oasiscloud.io/) |
| [POA Core Network](https://blockscout.com/poa/core) | [Ropsten Testnet](https://blockscout.com/eth/ropsten) | [Petrichor](https://explorer.petrachor.com/) |
| [RSK](https://blockscout.com/rsk/mainnet) | | [PIRL](http://pirl.es/) |
| [xDai Chain](https://blockscout.com/poa/dai) | | [SafeChain](https://explorer.safechain.io) |
| | | [SpringChain](https://explorer.springrole.com/) |
| | | [Kotti Testnet](https://kottiexplorer.ethernode.io/) |


### Visual Interface

Interface for the POA network _updated 02/2019_

![BlockScout Example](explorer_example_2_2019.gif)


### Umbrella Project Organization

This repository is an [umbrella project](https://elixir-lang.org/getting-started/mix-otp/dependencies-and-umbrella-projects.html). Each directory under `apps/` is a separate [Mix](https://hexdocs.pm/mix/Mix.html) project and [OTP application](https://hexdocs.pm/elixir/Application.html), but the projects can use each other as dependencies in their `mix.exs`.
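For instance, an app that needs another app from the umbrella declares it with `in_umbrella: true` in its `mix.exs` deps. A minimal sketch of the idea (the real dependency lists in each app are longer and may differ):

```elixir
# apps/block_scout_web/mix.exs — illustrative excerpt only
defp deps do
  [
    # ... other dependencies ...
    {:explorer, in_umbrella: true}
  ]
end
```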

Each OTP application has a restricted domain.

| Directory | OTP Application | Namespace | Purpose |
|:----------|:----------------|:----------|:--------|
| `apps/ethereum_jsonrpc` | `:ethereum_jsonrpc` | `EthereumJSONRPC` | Ethereum JSONRPC client. It is allowed to know `Explorer`'s param format, but it cannot directly depend on `:explorer`. |
| `apps/explorer` | `:explorer` | `Explorer` | Storage for the indexed chain. Can read and write to the backing storage. MUST be able to boot in a read-only mode when run independently from `:indexer`, so it cannot depend on `:indexer`, as that would start `:indexer` indexing. |
| `apps/block_scout_web` | `:block_scout_web` | `BlockScoutWeb` | Phoenix interface to `:explorer`. The minimum interface to allow web access should go in `:block_scout_web`. Any business rules or interface not tied directly to `Phoenix` or `Plug` should go in `:explorer`. MUST be able to boot in a read-only mode when run independently from `:indexer`, so it cannot depend on `:indexer`, as that would start `:indexer` indexing. |
| `apps/indexer` | `:indexer` | `Indexer` | Uses `:ethereum_jsonrpc` to index the chain and batch-import data into `:explorer`. Any process, `Task`, or `GenServer` that automatically reads from the chain and writes to `:explorer` should be in `:indexer`. This restricts automatic writes to `:indexer`, and read-only mode can be achieved by not running `:indexer`. |


## Getting Started

### Requirements

| Dependency | Mac | Linux |
|------------|-----|-------|
| [Erlang/OTP 21.0.4](https://github.com/erlang/otp) | `brew install erlang` | [Erlang Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L134) |
| [Elixir 1.8.1](https://elixir-lang.org/) | :point_up: | [Elixir Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L138) |
| [Postgres 10.3](https://www.postgresql.org/) | `brew install postgresql` | [Postgres Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L187) |
| [Node.js 10.x.x](https://nodejs.org/en/) | `brew install node` | [Node.js Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L66) |
| [Automake](https://www.gnu.org/software/automake/) | `brew install automake` | [Automake Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L72) |
| [Libtool](https://www.gnu.org/software/libtool/) | `brew install libtool` | [Libtool Install Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L62) |
| [Inotify-tools](https://github.com/rvoicilas/inotify-tools/wiki) | Not Required | Ubuntu - `apt-get install inotify-tools` |
| [GCC Compiler](https://gcc.gnu.org/) | `brew install gcc` | [GCC Compiler Example](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L70) |
| [GMP](https://gmplib.org/) | `brew install gmp` | [Install GMP Devel](https://github.com/poanetwork/blockscout-terraform/blob/33f68e816e36dc2fb055911fa0372531f0e956e7/modules/stack/libexec/init.sh#L74) |

### Build and Run

#### Playbook Deployment

We use [Ansible](https://docs.ansible.com/ansible/latest/index.html) & [Terraform](https://www.terraform.io/intro/getting-started/install.html) to build the correct infrastructure to run BlockScout. See [https://github.com/poanetwork/blockscout-terraform](https://github.com/poanetwork/blockscout-terraform) for details and instructions.

#### Manual Deployment

See [Manual BlockScout Deployment](https://forum.poa.network/t/manual-blockscout-deployment/2458) for instructions.

#### Environment Variables

Our forum contains a [full list of BlockScout environment variables](https://forum.poa.network/t/faq-blockscout-environment-variables/1814).

#### Configuring EVM Chains

* **CSS:** Update the import instruction in `apps/block_scout_web/assets/css/theme/_variables.scss` to select a preset css file. This is reflected in the `production-${chain}` branch for each instance. For example, in the `production-xdai` branch, it is set to `@import "dai-variables"`.

* **ENV:** Update the [environment variables](https://forum.poa.network/t/faq-blockscout-environment-variables/1814) to match the chain specs.

#### Automating Restarts

By default `blockscout` does not restart if it crashes. To enable automated restarts, set the environment variable `HEART_COMMAND` to whatever command you run to start `blockscout` (an example is sketched below). Configure the heartbeat timeout to change how long it waits before considering the application unresponsive. At that point, it will kill the current blockscout instance and execute the `HEART_COMMAND`. By default a crash dump is not written unless you set `ERL_CRASH_DUMP_SECONDS` to a positive or negative integer. See the [heart](http://erlang.org/doc/man/heart.html) documentation for more information.
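A sketch of what that might look like when starting the server directly with Mix (the path and start command are illustrative — substitute whatever your deployment actually runs, and note that heart monitoring must be enabled on the BEAM):

```shell
# Illustrative only: adjust HEART_COMMAND to your real start command
export HEART_COMMAND="cd /opt/blockscout && mix phx.server"
export ERL_CRASH_DUMP_SECONDS=-1   # negative value: wait as long as needed to write the crash dump
ELIXIR_ERL_OPTIONS="-heart" mix phx.server   # -heart enables the heart process that runs HEART_COMMAND
```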


#### CircleCI Updates

To monitor build status, configure your local [CCMenu](http://ccmenu.org/) with the following url: [`https://circleci.com/gh/poanetwork/blockscout.cc.xml?circle-token=f8823a3d0090407c11f87028c73015a331dbf604`](https://circleci.com/gh/poanetwork/blockscout.cc.xml?circle-token=f8823a3d0090407c11f87028c73015a331dbf604)


## Testing

### Requirements

* PhantomJS (for Wallaby)

### Running the tests

1. Build the assets.
    `cd apps/block_scout_web/assets && npm run build; cd -`

2. Format the Elixir code.
    `mix format`

3. Run the test suite with coverage for the whole umbrella project. This step can be run with the different configurations outlined below.
    `mix coveralls.html --umbrella`

4. Lint the Elixir code.
    `mix credo --strict`

5. Run the dialyzer.
    `mix dialyzer --halt-exit-status`

6. Check the Elixir code for vulnerabilities.
    `cd apps/explorer && mix sobelow --config; cd -`
    `cd apps/block_scout_web && mix sobelow --config; cd -`

7. Lint the JavaScript code.
    `cd apps/block_scout_web/assets && npm run eslint; cd -`

8. Test the JavaScript code.
    `cd apps/block_scout_web/assets && npm run test; cd -`

#### Parity

##### Mox

**This is the default setup. `mix coveralls.html --umbrella` will work on its own, but to be explicit, use the following setup**:

```shell
export ETHEREUM_JSONRPC_CASE=EthereumJSONRPC.Case.Parity.Mox
export ETHEREUM_JSONRPC_WEB_SOCKET_CASE=EthereumJSONRPC.WebSocket.Case.Mox
mix coveralls.html --umbrella --exclude no_parity
```

##### HTTP / WebSocket

```shell
export ETHEREUM_JSONRPC_CASE=EthereumJSONRPC.Case.Parity.HTTPWebSocket
export ETHEREUM_JSONRPC_WEB_SOCKET_CASE=EthereumJSONRPC.WebSocket.Case.Parity
mix coveralls.html --umbrella --exclude no_parity
```

| Protocol  | URL                     |
|:----------|:------------------------|
| HTTP      | `http://localhost:8545` |
| WebSocket | `ws://localhost:8546`   |

#### Geth

##### Mox

```shell
export ETHEREUM_JSONRPC_CASE=EthereumJSONRPC.Case.Geth.Mox
export ETHEREUM_JSONRPC_WEB_SOCKET_CASE=EthereumJSONRPC.WebSocket.Case.Mox
mix coveralls.html --umbrella --exclude no_geth
```

##### HTTP / WebSocket

```shell
export ETHEREUM_JSONRPC_CASE=EthereumJSONRPC.Case.Geth.HTTPWebSocket
export ETHEREUM_JSONRPC_WEB_SOCKET_CASE=EthereumJSONRPC.WebSocket.Case.Geth
mix coveralls.html --umbrella --exclude no_geth
```

| Protocol  | URL                                               |
|:----------|:--------------------------------------------------|
| HTTP      | `https://mainnet.infura.io/8lTvJTKmHPCHazkneJsY`  |
| WebSocket | `wss://mainnet.infura.io/ws/8lTvJTKmHPCHazkneJsY` |

### API Documentation

To view Modules and API Reference documentation:

1. Generate documentation.
    `mix docs`
2. View the generated docs.
    `open doc/index.html`

## Front-end

### Javascript

All Javascript files are under [apps/block_scout_web/assets/js](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js) and the main file is [app.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/app.js). This file imports all javascript used in the application. If you want to create a new JS file, consider creating it in [/js/pages](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/pages) or [/js/lib](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/lib), as follows:

#### js/lib
This folder contains all scripts that can be reused in any page or used as helpers for some component.

#### js/pages
This folder contains the scripts that are specific to a given page.
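Whichever folder you choose, remember that `app.js` is the single entry point, so a new page-specific module also needs to be imported there — for example (the path below is hypothetical):

```javascript
// app.js — import the new page module so it is included in the built bundle (hypothetical path)
import './pages/address/my_new_page'
```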

#### Redux
This project uses Redux to control the state in some pages. Some pages update in real time thanks to Phoenix channels (e.g. the Address page), so the page state changes a lot depending on which events it is listening to. Redux is also used to load some content asynchronously; see [async_listing_load.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/async_listing_load.js).

To understand how to build new pages that need Redux in this project, see [redux_helpers.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/redux_helpers.js).
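The new address-logs page added in this PR follows the usual shape: an `initialState`, a `reducer`, and an `elements` map whose `render($el, state)` callbacks update the DOM, wired together with `connectElements` and `createAsyncLoadStore`. A stripped-down sketch of that shape (the selector, state field, and item key below are illustrative, not real ones):

```javascript
import $ from 'jquery'
import { connectElements } from '../../lib/redux_helpers.js'
import { createAsyncLoadStore } from '../../lib/async_listing_load'

export const initialState = { isSearch: false }

export function reducer (state = initialState, action) {
  switch (action.type) {
    case 'START_SEARCH':
      return Object.assign({}, state, { isSearch: true })
    default:
      return state
  }
}

// each key is a DOM selector; render receives the matched jQuery element and the current state
const elements = {
  '[data-cancel-search-button]': {
    render ($el, state) {
      return state.isSearch ? $el.show() : $el.hide()
    }
  }
}

const store = createAsyncLoadStore(reducer, initialState, 'dataset.identifier')
connectElements({ store, elements })
```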

## Internationalization

The app is currently internationalized, but it is only localized to U.S. English. To translate new strings:

1. Set up the translation file.
    `cd apps/block_scout_web; mix gettext.extract --merge; cd -`
2. To edit the new strings, go to `apps/block_scout_web/priv/gettext/en/LC_MESSAGES/default.po`.

## Metrics

BlockScout is set up to export [Prometheus](https://prometheus.io/) metrics at `/metrics`.

### Prometheus

1. Install prometheus: `brew install prometheus`
2. Start the web server: `iex -S mix phx.server`
3. Start prometheus: `prometheus --config.file=prometheus.yml` (a minimal scrape config is sketched below)
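A minimal `prometheus.yml` for scraping a locally running instance might look like this (the target assumes the default Phoenix dev server on `localhost:4000` — adjust to your setup):

```yaml
# prometheus.yml — minimal scrape config (assumed target: local BlockScout on port 4000)
global:
  scrape_interval: 10s

scrape_configs:
  - job_name: blockscout
    metrics_path: /metrics
    static_configs:
      - targets: ['localhost:4000']
```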

### Grafana

1. Install grafana: `brew install grafana`
2. Install the Pie Chart panel plugin: `grafana-cli plugins install grafana-piechart-panel`
3. Start grafana: `brew services start grafana`
4. Add Prometheus as a Data Source
    1. `open http://localhost:3000/datasources`
    2. Click "+ Add data source"
    3. Put "Prometheus" for "Name"
    4. Change "Type" to "Prometheus"
    5. Set "URL" to "http://localhost:9090"
    6. Set "Scrape Interval" to "10s"
5. Add the dashboards from https://github.com/deadtrickster/beam-dashboards. For each `*.json` file in the repo:
    1. `open http://localhost:3000/dashboard/import`
    2. Copy the contents of the JSON file into the "Or paste JSON" entry
    3. Click "Load"
6. View the dashboards. (You will need to click around and use BlockScout for the web-related metrics to show up.)

## Tracing

BlockScout supports tracing via [Spandex](https://github.com/spandex-project/spandex). Each application has its own tracer, which is configured within that application. To enable it, visit each application's `config/<env>.exs` and update its tracer configuration, changing `disabled?: true` to `disabled?: false`. Do this for each application you'd like included in your trace data.
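For example, for the web app the change might look like the following (a sketch only — the tracer module name and the surrounding options are assumptions; check the existing tracer block in each app's config):

```elixir
# apps/block_scout_web/config/dev.exs — illustrative; keep whatever other options are already set
config :block_scout_web, BlockScoutWeb.Tracer,
  env: "dev",
  disabled?: false
```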

Currently, only [Datadog](https://www.datadoghq.com/) is supported as a tracing backend, but more will be added soon.

### DataDog

If you would like to use DataDog, after enabling `Spandex`, set the `DATADOG_HOST` and `DATADOG_PORT` environment variables to the host/port that your Datadog agent is running on. For more information on Datadog and the Datadog agent, see their [documentation](https://docs.datadoghq.com/).
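For instance (values are illustrative — `8126` is the Datadog trace agent's usual APM port, but use whatever your agent is actually configured for):

```shell
export DATADOG_HOST=localhost
export DATADOG_PORT=8126   # default Datadog APM (trace agent) port — adjust if your agent differs
```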

### Other

If you want to use a different backend, remove the `SpandexDatadog.ApiServer` `Supervisor.child_spec` from `Explorer.Application` and follow the instructions provided by `Spandex` for setting up that backend.

## Memory Usage

The work queues for building the index of all blocks, balances (coin and token), and internal transactions can grow quite large. By default, the soft limit is 1 GiB, which can be changed in `apps/indexer/config/config.exs`:

```elixir
config :indexer, memory_limit: 1 <<< 30
```

Memory usage is checked once per minute. If the soft limit is reached, the shrinkable work queues will shed half their load. The shed load will be restored from the database, the same as when a restart of the server occurs, so rebuilding the work queue will be slower but use less memory.

If all queues are at their minimum size, then no more memory can be reclaimed and an error will be logged.

## Acknowledgements

We would like to thank the [EthPrize foundation](http://ethprize.io/) for their funding support.

## Contributing

See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution and pull request protocol. We expect contributors to follow our [code of conduct](CODE_OF_CONDUCT.md) when submitting code or comments.


## License

[![License: GPL v3.0](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)

This project is licensed under the GNU General Public License v3.0. See the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Document</title>
  <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
  <meta name="description" content="Description">
  <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <link rel="stylesheet" href="//unpkg.com/docsify/lib/themes/vue.css">
</head>
<body>
  <div id="app"></div>
  <script>
    window.$docsify = {
      name: '',
      repo: ''
    }
  </script>
  <script src="//unpkg.com/docsify/lib/docsify.min.js"></script>
</body>
</html>
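To preview the docsify docs locally, something like the following should work (this assumes the `docs/` folder added by this PR and the `docsify-cli` npm package, which is not bundled with the project):

```shell
npm i -g docsify-cli   # one-time install of the docsify CLI (assumption: installed globally)
docsify serve docs     # serves the docs folder at http://localhost:3000
```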