Merge branch 'master' into release_build

pull/1131/head
Andrew Cravenho 6 years ago committed by GitHub
commit b0abe83ef9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 69
      apps/block_scout_web/assets/__tests__/pages/address.js
  2. 67
      apps/block_scout_web/assets/js/pages/address.js
  3. 3
      apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex
  4. 19
      apps/explorer/lib/explorer/chain/internal_transaction/type.ex
  5. 31
      apps/explorer/priv/repo/migrations/20181107164103_eip6.exs
  6. 33
      apps/explorer/priv/repo/migrations/20181108205650_additional_internal_transaction_constraints.exs
  7. 64
      apps/explorer/priv/repo/migrations/scripts/20181107164103_eip6.sql
  8. 80
      apps/explorer/priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql

@ -172,70 +172,9 @@ describe('RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', () => {
})
})
describe('RECEIVED_NEW_PENDING_TRANSACTION', () => {
test('with new pending transaction', () => {
const state = Object.assign({}, initialState, {
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2' },
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('beyond page one', () => {
const state = Object.assign({}, initialState, {
beyondPageOne: true,
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('with filtered out pending transaction', () => {
const state = Object.assign({}, initialState, {
filter: 'to'
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([])
})
})
describe('RECEIVED_NEW_TRANSACTION', () => {
test('with new transaction', () => {
const state = Object.assign({}, initialState, {
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }],
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
@ -244,9 +183,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2', validated: true }
])
expect(output.transactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2' },
{ transactionHash: 1, transactionHtml: 'test 1' }
@ -255,7 +191,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }],
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
@ -264,9 +199,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test' }
])
expect(output.transactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
@ -282,7 +214,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([])
expect(output.transactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])

@ -12,7 +12,6 @@ import { updateAllCalculatedUsdValues } from '../lib/currency.js'
import { loadTokenBalanceDropdown } from '../lib/token_balance_dropdown'
const BATCH_THRESHOLD = 10
const TRANSACTION_VALIDATED_MOVE_DELAY = 1000
export const initialState = {
channelDisconnected: false,
@ -24,7 +23,6 @@ export const initialState = {
transactionCount: null,
validationCount: null,
pendingTransactions: [],
transactions: [],
internalTransactions: [],
internalTransactionsBatch: [],
@ -91,26 +89,6 @@ function baseReducer (state = initialState, action) {
})
}
}
case 'RECEIVED_NEW_PENDING_TRANSACTION': {
if (state.channelDisconnected || state.beyondPageOne) return state
if ((state.filter === 'to' && action.msg.toAddressHash !== state.addressHash) ||
(state.filter === 'from' && action.msg.fromAddressHash !== state.addressHash)) {
return state
}
return Object.assign({}, state, {
pendingTransactions: [
action.msg,
...state.pendingTransactions
]
})
}
case 'REMOVE_PENDING_TRANSACTION': {
return Object.assign({}, state, {
pendingTransactions: state.pendingTransactions.filter((transaction) => action.msg.transactionHash !== transaction.transactionHash)
})
}
case 'RECEIVED_NEW_TRANSACTION': {
if (state.channelDisconnected) return state
@ -123,7 +101,6 @@ function baseReducer (state = initialState, action) {
}
return Object.assign({}, state, {
pendingTransactions: state.pendingTransactions.map((transaction) => action.msg.transactionHash === transaction.transactionHash ? Object.assign({}, action.msg, { validated: true }) : transaction),
transactions: [
action.msg,
...state.transactions
@ -184,28 +161,6 @@ const elements = {
$el.empty().append(numeral(state.validationCount).format())
}
},
'[data-selector="pending-transactions-list"]': {
load ($el) {
return {
pendingTransactions: $el.children().map((index, el) => ({
transactionHash: el.dataset.transactionHash,
transactionHtml: el.outerHTML
})).toArray()
}
},
render ($el, state, oldState) {
if (oldState.pendingTransactions === state.pendingTransactions) return
const container = $el[0]
const newElements = _.map(state.pendingTransactions, ({ transactionHtml }) => $(transactionHtml)[0])
listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
},
'[data-selector="pending-transactions-count"]': {
render ($el, state, oldState) {
if (oldState.pendingTransactions === state.pendingTransactions) return
$el[0].innerHTML = numeral(state.pendingTransactions.filter(({ validated }) => !validated).length).format()
}
},
'[data-selector="empty-transactions-list"]': {
render ($el, state) {
if (state.transactions.length || state.loadingNextPage || state.pagingError) {
@ -226,16 +181,10 @@ const elements = {
},
render ($el, state, oldState) {
if (oldState.transactions === state.transactions) return
function updateTransactions () {
const container = $el[0]
const newElements = _.map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
if ($('[data-selector="pending-transactions-list"]').is(':visible')) {
setTimeout(updateTransactions, TRANSACTION_VALIDATED_MOVE_DELAY + 400)
} else {
updateTransactions()
}
const container = $el[0]
const newElements = _.map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
return listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
},
'[data-selector="internal-transactions-list"]': {
@ -306,19 +255,11 @@ if ($addressDetailsPage.length) {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: humps.camelizeKeys(msgs)
})))
addressChannel.on('pending_transaction', (msg) => store.dispatch({
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: humps.camelizeKeys(msg)
}))
addressChannel.on('transaction', (msg) => {
store.dispatch({
type: 'RECEIVED_NEW_TRANSACTION',
msg: humps.camelizeKeys(msg)
})
setTimeout(() => store.dispatch({
type: 'REMOVE_PENDING_TRANSACTION',
msg: humps.camelizeKeys(msg)
}), TRANSACTION_VALIDATED_MOVE_DELAY)
})
const blocksChannel = socket.channel(`blocks:${addressHash}`, {})

@ -8,7 +8,7 @@ defmodule BlockScoutWeb.AddressChannel do
alias Explorer.Chain.Hash
alias Phoenix.View
intercept(["balance_update", "count", "internal_transaction", "pending_transaction", "transaction"])
intercept(["balance_update", "count", "internal_transaction", "transaction"])
def join("addresses:" <> _address_hash, _params, socket) do
{:ok, %{}, socket}
@ -62,7 +62,6 @@ defmodule BlockScoutWeb.AddressChannel do
end
def handle_out("transaction", data, socket), do: handle_transaction(data, socket, "transaction")
def handle_out("pending_transaction", data, socket), do: handle_transaction(data, socket, "pending_transaction")
def handle_transaction(%{address: address, transaction: transaction}, socket, event) do
Gettext.put_locale(BlockScoutWeb.Gettext, socket.assigns.locale)

@ -38,6 +38,13 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.cast("selfdestruct")
{:ok, :selfdestruct}
Deprecated values are not allowed for incoming data.
iex> Explorer.Chain.InternalTransaction.Type.cast(:suicide)
:error
iex> Explorer.Chain.InternalTransaction.Type.cast("suicide")
:error
Unsupported `String.t` return an `:error`.
iex> Explorer.Chain.InternalTransaction.Type.cast("hard-fork")
@ -65,6 +72,11 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.dump(:selfdestruct)
{:ok, "selfdestruct"}
Deprecated values are not allowed to be dumped to the database as old values should only be read, not written.
iex> Explorer.Chain.InternalTransaction.Type.dump(:suicide)
:error
Other atoms return an error
iex> Explorer.Chain.InternalTransaction.Type.dump(:other)
@ -91,6 +103,11 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.load("selfdestruct")
{:ok, :selfdestruct}
Converts deprecated value on load to the corresponding `t:t/0`.
iex> Explorer.Chain.InternalTransaction.Type.load("suicide")
{:ok, :selfdestruct}
Other `t:String.t/0` return `:error`
iex> Explorer.Chain.InternalTransaction.Type.load("other")
@ -103,6 +120,8 @@ defmodule Explorer.Chain.InternalTransaction.Type do
def load("create"), do: {:ok, :create}
def load("reward"), do: {:ok, :reward}
def load("selfdestruct"), do: {:ok, :selfdestruct}
# deprecated
def load("suicide"), do: {:ok, :selfdestruct}
def load(_), do: :error
@doc """

@ -1,28 +1,31 @@
defmodule Explorer.Repo.Migrations.EIP6 do
@moduledoc """
Use `priv/repo/migrations/scripts/20181107164103_eip6.sql` to migrate data and validate constraint.
```sh
mix ecto.migrate
psql -d $DATABASE -a -f priv/repo/migrations/scripts/20181107164103_eip6.sql
```
"""
use Ecto.Migration
def up do
execute("ALTER TABLE internal_transactions DROP CONSTRAINT suicide_has_from_and_to_address_hashes")
execute("UPDATE internal_transactions SET type = 'selfdestruct' WHERE type = 'suicide'")
create(
constraint(
:internal_transactions,
:selfdestruct_has_from_and_to_address_hashes,
check: """
type != 'selfdestruct' OR
(from_address_hash IS NOT NULL AND gas IS NULL AND to_address_hash IS NOT NULL)
"""
)
)
# `NOT VALID` skips checking pre-existing rows. Use `priv/repo/migrations/scripts/20181107164103_eip6.sql` to
# migrate data and validate constraints
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT selfdestruct_has_from_and_to_address
CHECK (type != 'selfdestruct' OR (from_address_hash IS NOT NULL AND gas IS NULL AND to_address_hash IS NOT NULL))
NOT VALID
""")
end
def down do
execute("ALTER TABLE internal_transactions DROP CONSTRAINT selfdestruct_has_from_and_to_address_hashes")
execute("UPDATE internal_transactions SET type = 'suicide' WHERE type = 'selfdestruct'")
create(
constraint(
:internal_transactions,

@ -1,10 +1,37 @@
defmodule Explorer.Repo.Migrations.AdditionalInternalTransactionConstraints do
@moduledoc """
Use `priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql` to migrate data and
validate constraint.
```sh
mix ecto.migrate
psql -d $DATABASE -a -f priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql
```
"""
use Ecto.Migration
def up do
create(constraint(:internal_transactions, :call_has_call_type, check: "type != 'call' OR call_type IS NOT NULL"))
create(constraint(:internal_transactions, :call_has_input, check: "type != 'call' OR input IS NOT NULL"))
create(constraint(:internal_transactions, :create_has_init, check: "type != 'create' OR init IS NOT NULL"))
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT call_has_call_type
CHECK (type != 'call' OR call_type IS NOT NULL)
NOT VALID
""")
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT call_has_input
CHECK (type != 'call' OR input IS NOT NULL)
NOT VALID
""")
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT create_has_init
CHECK (type != 'create' OR init IS NOT NULL)
NOT VALID
""")
end
def down do

@ -0,0 +1,64 @@
-- Backfills deprecated internal transaction type 'suicide' (pre-EIP-6 name) to
-- 'selfdestruct' in batches, then validates the NOT VALID constraint added by
-- migration 20181107164103_eip6.exs. Batching bounds the size of each UPDATE
-- on large internal_transactions tables.
DO $$
DECLARE
   batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
   -- start at 0 so tables with fewer than batch_size rows still enter the loop
   iterator integer := 0;
   max_row_number integer;
   next_iterator integer;
   updated_row_count integer;
   deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS current_suicide_internal_transactions_temp;
  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE current_suicide_internal_transactions_temp(
    transaction_hash bytea NOT NULL,
    index bigint NOT NULL,
    row_number integer
  );
  INSERT INTO current_suicide_internal_transactions_temp
  SELECT DISTINCT ON (transaction_hash, index)
    transaction_hash,
    index,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE type = 'suicide'
  ORDER BY transaction_hash, index DESC;
  max_row_number := (SELECT MAX(row_number) FROM current_suicide_internal_transactions_temp);
  -- ROW_NUMBER() starts at 1, so MAX(row_number) is already the item count;
  -- COALESCE covers the empty-table case (MAX over no rows is NULL)
  RAISE NOTICE '% items to be updated', COALESCE(max_row_number, 0);
  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  -- (a NULL max_row_number makes the condition NULL, so the loop is skipped)
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;
    RAISE NOTICE '-> suicide internal transactions % to % to be updated', iterator, next_iterator - 1;
    UPDATE internal_transactions
    SET type = 'selfdestruct'
    FROM current_suicide_internal_transactions_temp
    WHERE internal_transactions.transaction_hash = current_suicide_internal_transactions_temp.transaction_hash AND
          internal_transactions.index = current_suicide_internal_transactions_temp.index AND
          current_suicide_internal_transactions_temp.row_number < next_iterator;
    GET DIAGNOSTICS updated_row_count = ROW_COUNT;
    RAISE NOTICE '-> % internal transactions updated from suicide to selfdestruct', updated_row_count;
    DELETE FROM current_suicide_internal_transactions_temp
    WHERE row_number < next_iterator;
    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;
    -- every updated row must also have been drained from the temp table
    ASSERT updated_row_count = deleted_row_count;
    -- FLUSHES WAL BETWEEN BATCHES (NOTE: a DO block runs in a single
    -- transaction, so CHECKPOINT does not commit the batch updates)
    CHECKPOINT;
    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;
  RAISE NOTICE 'All suicide type internal transactions updated to selfdestruct. Validating constraint.';
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT selfdestruct_has_from_and_to_address;
END $$;

@ -0,0 +1,80 @@
-- Schedules a refetch of internal transactions that violate the new
-- call_has_call_type / call_has_input / create_has_init constraints: clears the
-- owning transactions' fetch markers, deletes the offending internal
-- transactions in batches, then validates the NOT VALID constraints added by
-- migration 20181108205650_additional_internal_transaction_constraints.exs.
DO $$
DECLARE
   batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
   -- start at 0 so tables with fewer than batch_size rows still enter the loop
   iterator integer := 0;
   max_row_number integer;
   next_iterator integer;
   updated_transaction_count integer;
   deleted_internal_transaction_count integer;
   deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS transactions_with_deprecated_internal_transactions;
  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE transactions_with_deprecated_internal_transactions(
    hash bytea NOT NULL,
    row_number integer
  );
  INSERT INTO transactions_with_deprecated_internal_transactions
  SELECT DISTINCT ON (transaction_hash)
    transaction_hash,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE
    -- call_has_call_type CONSTRAINT
    (type = 'call' AND call_type IS NULL) OR
    -- call_has_input CONSTRAINT
    (type = 'call' AND input IS NULL) OR
    -- create_has_init CONSTRAINT
    (type = 'create' AND init IS NULL)
  ORDER BY transaction_hash DESC;
  max_row_number := (SELECT MAX(row_number) FROM transactions_with_deprecated_internal_transactions);
  -- ROW_NUMBER() starts at 1, so MAX(row_number) is already the transaction
  -- count; COALESCE covers the empty-table case (MAX over no rows is NULL)
  RAISE NOTICE '% transactions to be updated', COALESCE(max_row_number, 0);
  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  -- (a NULL max_row_number makes the condition NULL, so the loop is skipped)
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;
    RAISE NOTICE '-> transactions with deprecated internal transactions % to % to be updated', iterator, next_iterator - 1;
    -- clearing internal_transactions_indexed_at marks the transaction for the
    -- indexer to refetch its internal transactions
    UPDATE transactions
    SET internal_transactions_indexed_at = NULL,
        error = NULL
    FROM transactions_with_deprecated_internal_transactions
    WHERE transactions.hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;
    GET DIAGNOSTICS updated_transaction_count = ROW_COUNT;
    RAISE NOTICE '-> % transactions updated to refetch internal transactions', updated_transaction_count;
    DELETE FROM internal_transactions
    USING transactions_with_deprecated_internal_transactions
    WHERE internal_transactions.transaction_hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;
    GET DIAGNOSTICS deleted_internal_transaction_count = ROW_COUNT;
    RAISE NOTICE '-> % internal transactions deleted', deleted_internal_transaction_count;
    DELETE FROM transactions_with_deprecated_internal_transactions
    WHERE row_number < next_iterator;
    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;
    -- the temp table holds one row per transaction hash (DISTINCT ON), so the
    -- drained batch size must match the number of transactions updated
    ASSERT updated_transaction_count = deleted_row_count;
    -- FLUSHES WAL BETWEEN BATCHES (NOTE: a DO block runs in a single
    -- transaction, so CHECKPOINT does not commit the batch updates)
    CHECKPOINT;
    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;
  RAISE NOTICE 'All deprecated internal transactions will be refetched. Validating constraints.';
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_call_type;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_input;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT create_has_init;
END $$;
Loading…
Cancel
Save