Merge branch 'master' into sa-ganache-ignore-internal-transactions

pull/1160/head
Andrew Cravenho 6 years ago committed by GitHub
commit 4ec8ba3bec
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 30
      .circleci/config.yml
  2. 80
      apps/block_scout_web/assets/__tests__/lib/async_listing_load.js
  3. 69
      apps/block_scout_web/assets/__tests__/pages/address.js
  4. 4
      apps/block_scout_web/assets/css/app.scss
  5. 19
      apps/block_scout_web/assets/css/components/_highlight.scss
  6. 1
      apps/block_scout_web/assets/js/app.js
  7. 268
      apps/block_scout_web/assets/js/lib/async_listing_load.js
  8. 9
      apps/block_scout_web/assets/js/lib/smart_contract/code_highlighting.js
  9. 67
      apps/block_scout_web/assets/js/pages/address.js
  10. 33
      apps/block_scout_web/assets/package-lock.json
  11. 3
      apps/block_scout_web/assets/package.json
  12. 3
      apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex
  13. 7
      apps/block_scout_web/lib/block_scout_web/templates/address_contract/index.html.eex
  14. 2
      apps/block_scout_web/lib/block_scout_web/templates/address_internal_transaction/index.html.eex
  15. 2
      apps/block_scout_web/lib/block_scout_web/templates/address_token_transfer/index.html.eex
  16. 2
      apps/block_scout_web/lib/block_scout_web/templates/tokens/holder/index.html.eex
  17. 2
      apps/block_scout_web/lib/block_scout_web/templates/tokens/transfer/index.html.eex
  18. 16
      apps/block_scout_web/lib/block_scout_web/views/address_contract_view.ex
  19. 8
      apps/block_scout_web/priv/gettext/default.pot
  20. 8
      apps/block_scout_web/priv/gettext/en/LC_MESSAGES/default.po
  21. 121
      apps/block_scout_web/test/block_scout_web/controllers/tokens/inventory_controller_test.ex
  22. 55
      apps/block_scout_web/test/block_scout_web/views/address_contract_view_test.exs
  23. 19
      apps/explorer/lib/explorer/chain/internal_transaction/type.ex
  24. 4
      apps/explorer/lib/explorer/chain/token_transfer.ex
  25. 93
      apps/explorer/lib/release_tasks.ex
  26. 31
      apps/explorer/priv/repo/migrations/20181107164103_eip6.exs
  27. 33
      apps/explorer/priv/repo/migrations/20181108205650_additional_internal_transaction_constraints.exs
  28. 64
      apps/explorer/priv/repo/migrations/scripts/20181107164103_eip6.sql
  29. 80
      apps/explorer/priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql
  30. 2
      apps/explorer/test/explorer/chain_test.exs
  31. 4
      mix.exs
  32. 2
      mix.lock
  33. 3
      rel/commands/migrate.sh
  34. 3
      rel/commands/seed.sh
  35. 85
      rel/config.exs
  36. 3
      rel/plugins/.gitignore
  37. 30
      rel/vm.args

@ -122,6 +122,7 @@ jobs:
- mix.exs
- mix.lock
- appspec.yml
- rel
check_formatted:
docker:
# Ensure .tool-versions matches
@ -279,6 +280,32 @@ jobs:
name: Jest
command: ./node_modules/.bin/jest
working_directory: apps/block_scout_web/assets
release:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.7.2
environment:
MIX_ENV: prod
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- run: mix local.rebar --force
- run: mix release --verbose --env prod
- run:
name: Collecting artifacts
command: |
find -name 'blockscout.tar.gz' -exec sh -c 'mkdir -p ci_artifact && cp "$@" ci_artifact/ci_artifact_blockscout.tar.gz' _ {} +
when: always
- store_artifacts:
name: Uploading CI artifacts
path: ci_artifact/ci_artifact_blockscout.tar.gz
destination: ci_artifact_blockscout.tar.gz
sobelow:
docker:
# Ensure .tool-versions matches
@ -561,6 +588,9 @@ workflows:
- jest:
requires:
- build
- release:
requires:
- build
- sobelow:
requires:
- build

@ -0,0 +1,80 @@
import { asyncReducer, asyncInitialState } from '../../js/lib/async_listing_load'

// Build a fresh copy of the initial async state, optionally overriding keys,
// so each test starts from a known baseline without mutating the shared state.
const stateWith = (overrides) => Object.assign({}, asyncInitialState, overrides)

describe('ELEMENTS_LOAD', () => {
  test('sets only nextPagePath and ignores other keys', () => {
    const action = { type: 'ELEMENTS_LOAD', nextPagePath: 'set', foo: 1 }
    const result = asyncReducer(stateWith(), action)

    expect(result.foo).not.toEqual(1)
    expect(result.nextPagePath).toEqual('set')
  })
})

describe('ADD_ITEM_KEY', () => {
  test('sets itemKey to what was passed in the action', () => {
    const expectedItemKey = 'expected.Key'
    const result = asyncReducer(stateWith(), { type: 'ADD_ITEM_KEY', itemKey: expectedItemKey })

    expect(result.itemKey).toEqual(expectedItemKey)
  })
})

describe('START_REQUEST', () => {
  test('sets loading status to true', () => {
    const result = asyncReducer(stateWith({ loading: false }), { type: 'START_REQUEST' })

    expect(result.loading).toEqual(true)
  })
})

describe('REQUEST_ERROR', () => {
  test('sets requestError to true', () => {
    const result = asyncReducer(stateWith({ requestError: false }), { type: 'REQUEST_ERROR' })

    expect(result.requestError).toEqual(true)
  })
})

describe('FINISH_REQUEST', () => {
  test('sets loading status to false', () => {
    const initial = stateWith({ loading: true, loadingFirstPage: true })
    const result = asyncReducer(initial, { type: 'FINISH_REQUEST' })

    expect(result.loading).toEqual(false)
    expect(result.loadingFirstPage).toEqual(false)
  })
})

describe('ITEMS_FETCHED', () => {
  test('sets the items to what was passed in the action', () => {
    const expectedItems = [1, 2, 3]
    const result = asyncReducer(stateWith(), { type: 'ITEMS_FETCHED', items: expectedItems })

    expect(result.items).toEqual(expectedItems)
  })
})

describe('NAVIGATE_TO_OLDER', () => {
  test('sets beyondPageOne to true', () => {
    const result = asyncReducer(stateWith({ beyondPageOne: false }), { type: 'NAVIGATE_TO_OLDER' })

    expect(result.beyondPageOne).toEqual(true)
  })
})

@ -172,70 +172,9 @@ describe('RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', () => {
})
})
describe('RECEIVED_NEW_PENDING_TRANSACTION', () => {
test('with new pending transaction', () => {
const state = Object.assign({}, initialState, {
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2' },
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('beyond page one', () => {
const state = Object.assign({}, initialState, {
beyondPageOne: true,
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
})
test('with filtered out pending transaction', () => {
const state = Object.assign({}, initialState, {
filter: 'to'
})
const action = {
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: { transactionHash: 2, transactionHtml: 'test 2' }
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([])
})
})
describe('RECEIVED_NEW_TRANSACTION', () => {
test('with new transaction', () => {
const state = Object.assign({}, initialState, {
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }],
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
@ -244,9 +183,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2', validated: true }
])
expect(output.transactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test 2' },
{ transactionHash: 1, transactionHtml: 'test 1' }
@ -255,7 +191,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }],
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }]
})
const action = {
@ -264,9 +199,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([
{ transactionHash: 2, transactionHtml: 'test' }
])
expect(output.transactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])
@ -282,7 +214,6 @@ describe('RECEIVED_NEW_TRANSACTION', () => {
}
const output = reducer(state, action)
expect(output.pendingTransactions).toEqual([])
expect(output.transactions).toEqual([
{ transactionHash: 1, transactionHtml: 'test 1' }
])

@ -47,6 +47,9 @@ $fa-font-path: "~@fortawesome/fontawesome-free/webfonts";
@import "node_modules/bootstrap/scss/badge";
@import "node_modules/bootstrap/scss/alert";
// Code highlight
@import "node_modules/highlight.js/styles/default";
//Custom theme
@import "theme/fonts";
@ -82,6 +85,7 @@ $fa-font-path: "~@fortawesome/fontawesome-free/webfonts";
@import "components/dropdown";
@import "components/loading-spinner";
@import "components/transaction-input";
@import "components/highlight";
:export {
primary: $primary;

@ -0,0 +1,19 @@
//replace the default background color from highlightjs
.hljs {
background: $gray-100;
}
.line-numbers {
[data-line-number] {
&:before {
content: attr(data-line-number);
display: inline-block;
border-right: 1px solid $gray-400;
padding: 0 .5em;
margin-right: .5em;
color: $gray-600
}
}
}

@ -38,6 +38,7 @@ import './lib/market_history_chart'
import './lib/pending_transactions_toggle'
import './lib/pretty_json'
import './lib/reload_button'
import './lib/smart_contract/code_highlighting'
import './lib/smart_contract/read_only_functions'
import './lib/smart_contract/wei_ether_converter'
import './lib/stop_propagation'

@ -1,85 +1,223 @@
import $ from 'jquery'
import _ from 'lodash'
import URI from 'urijs'
import humps from 'humps'
import listMorph from '../lib/list_morph'
import reduceReducers from 'reduce-reducers'
import { createStore, connectElements } from '../lib/redux_helpers.js'
/**
* This script is a generic function to load list within a tab async. See token transfers tab at Token's page as example.
* This is a generic lib to add pagination with asynchronous page loading. There are two ways of
* activating this in a page.
*
* If the page has no redux associated with, all you need is a markup with the following pattern:
*
* <div data-async-load data-async-listing="firstLoadPath">
* <div data-loading-message> message </div>
* <div data-empty-response-message style="display: none;"> message </div>
* <div data-error-message style="display: none;"> message </div>
* <div data-items></div>
* <a data-next-page-button style="display: none;"> button text </a>
* <div data-loading-button style="display: none;"> loading text </div>
* </div>
*
* the data-async-load is the attribute responsible for binding the store.
*
* If the page has a redux associated with, you need to connect the reducers instead of creating
* the store using the `createStore`. For instance:
*
* To get it working the markup must follow the pattern below:
* // my_page.js
* const initialState = { ... }
* const reducer = (state, action) => { ... }
* const store = createAsyncLoadStore(reducer, initialState, 'item.Key')
*
* <div data-async-listing="path">
* <div data-loading-message> message </div>
* <div data-empty-response-message style="display: none;"> message </div>
* <div data-error-message style="display: none;"> message </div>
* <div data-items></div>
* <a data-next-page-button style="display: none;"> button text </a>
* <div data-loading-button style="display: none;"> loading text </div>
* </div>
* The createAsyncLoadStore function will return a store with asynchronous loading activated. This
* approach will expect the same markup above, except for data-async-load attribute, which is used
* to create a store and it is not necessary for this case.
*
*/
const $element = $('[data-async-listing]')
function asyncListing (element, path) {
const $mainElement = $(element)
const $items = $mainElement.find('[data-items]')
const $loading = $mainElement.find('[data-loading-message]')
const $nextPageButton = $mainElement.find('[data-next-page-button]')
const $loadingButton = $mainElement.find('[data-loading-button]')
const $errorMessage = $mainElement.find('[data-error-message]')
const $emptyResponseMessage = $mainElement.find('[data-empty-response-message]')
$.getJSON(path, {type: 'JSON'})
.done(response => {
if (!response.items || response.items.length === 0) {
$emptyResponseMessage.show()
$items.empty()
} else {
$items.html(response.items)
export const asyncInitialState = {
/* it will consider any query param in the current URI as paging */
beyondPageOne: (URI(window.location).query() !== ''),
/* an array with every html element of the list being shown */
items: [],
/* the key for diffing the elements in the items array */
itemKey: null,
/* represents whether a request is happening or not */
loading: false,
/* if there was an error fetching items */
requestError: false,
/* if it is loading the first page */
loadingFirstPage: true,
/* link to the next page */
nextPagePath: null
}
export function asyncReducer (state = asyncInitialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, { nextPagePath: action.nextPagePath })
}
case 'ADD_ITEM_KEY': {
return Object.assign({}, state, { itemKey: action.itemKey })
}
case 'START_REQUEST': {
return Object.assign({}, state, {
loading: true,
requestError: false
})
}
case 'REQUEST_ERROR': {
return Object.assign({}, state, { requestError: true })
}
case 'FINISH_REQUEST': {
return Object.assign({}, state, {
loading: false,
loadingFirstPage: false
})
}
case 'ITEMS_FETCHED': {
return Object.assign({}, state, {
requestError: false,
items: action.items,
nextPagePath: action.nextPagePath
})
}
case 'NAVIGATE_TO_OLDER': {
history.replaceState({}, null, state.nextPagePath)
return Object.assign({}, state, { beyondPageOne: true })
}
default:
return state
}
}
export const elements = {
'[data-async-listing]': {
load ($el) {
const nextPagePath = $el.data('async-listing')
return { nextPagePath }
}
},
'[data-async-listing] [data-loading-message]': {
render ($el, state) {
if (state.loadingFirstPage) return $el.show()
$el.hide()
}
},
'[data-async-listing] [data-empty-response-message]': {
render ($el, state) {
if (
!state.requestError &&
(!state.loading || !state.loadingFirstPage) &&
state.items.length === 0
) {
return $el.show()
}
if (response.next_page_path) {
$nextPageButton.attr('href', response.next_page_path)
$nextPageButton.show()
} else {
$nextPageButton.hide()
$el.hide()
}
},
'[data-async-listing] [data-error-message]': {
render ($el, state) {
if (state.requestError) return $el.show()
$el.hide()
}
},
'[data-async-listing] [data-items]': {
render ($el, state, oldState) {
if (state.items === oldState.items) return
if (state.itemKey) {
const container = $el[0]
const newElements = _.map(state.items, (item) => $(item)[0])
listMorph(container, newElements, { key: state.itemKey })
return
}
})
.fail(() => $errorMessage.show())
.always(() => {
$loading.hide()
$loadingButton.hide()
})
}
if ($element.length === 1) {
$element.on('click', '[data-next-page-button]', (event) => {
event.preventDefault()
$el.html(state.items)
}
},
'[data-async-listing] [data-next-page-button]': {
render ($el, state) {
if (state.requestError) return $el.hide()
if (!state.nextPagePath) return $el.hide()
if (state.loading) return $el.hide()
const $button = $(event.target)
const path = $button.attr('href')
const $loadingButton = $element.find('[data-loading-button]')
$el.show()
$el.attr('href', state.nextPagePath)
}
},
'[data-async-listing] [data-loading-button]': {
render ($el, state) {
if (!state.loadingFirstPage && state.loading) return $el.show()
// change url to the next page link before loading the next page
history.pushState({}, null, path)
$button.hide()
$loadingButton.show()
$el.hide()
}
}
}
asyncListing($element, path)
})
/**
* Create a store combining the given reducer and initial state with the async reducer.
*
* reducer: The reducer that will be merged with the asyncReducer to add async
* loading capabilities to a page. Any state changes in the reducer passed will be
* applied AFTER the asyncReducer.
*
* initialState: The initial state to be merged with the async state. Any state
* values passed here will overwrite the values on asyncInitialState.
*
* itemKey: it will be added to the state as the key for diffing the elements and
* adding or removing with the correct animation. Check list_morph.js for more informantion.
*/
export function createAsyncLoadStore (reducer, initialState, itemKey) {
const state = _.merge(asyncInitialState, initialState)
const store = createStore(reduceReducers(asyncReducer, reducer, state))
$element.on('click', '[data-error-message]', (event) => {
event.preventDefault()
if (typeof itemKey !== 'undefined') {
store.dispatch({
type: 'ADD_ITEM_KEY',
itemKey
})
}
// event.target had a weird behavior here
// it hid the <a> tag but left the red div showing
const $link = $element.find('[data-error-message]')
const $loading = $element.find('[data-loading-message]')
const path = $element.data('async-listing')
connectElements({store, elements})
firstPageLoad(store)
return store
}
$link.hide()
$loading.show()
function firstPageLoad (store) {
const $element = $('[data-async-listing]')
function loadItems () {
const path = store.getState().nextPagePath
store.dispatch({type: 'START_REQUEST'})
$.getJSON(path, {type: 'JSON'})
.done(response => store.dispatch(Object.assign({type: 'ITEMS_FETCHED'}, humps.camelizeKeys(response))))
.fail(() => store.dispatch({type: 'REQUEST_ERROR'}))
.always(() => store.dispatch({type: 'FINISH_REQUEST'}))
}
loadItems()
asyncListing($element, path)
$element.on('click', '[data-error-message]', (event) => {
event.preventDefault()
loadItems()
})
// force browser to reload when the user goes back a page
$(window).on('popstate', () => location.reload())
$element.on('click', '[data-next-page-button]', (event) => {
event.preventDefault()
loadItems()
store.dispatch({type: 'NAVIGATE_TO_OLDER'})
})
}
asyncListing($element, $element.data('async-listing'))
const $element = $('[data-async-load]')
if ($element.length) {
const store = createStore(asyncReducer)
connectElements({store, elements})
firstPageLoad(store)
}

@ -0,0 +1,9 @@
import $ from 'jquery'
import hljs from 'highlight.js'
import hljsDefineSolidity from 'highlightjs-solidity'
// only activate highlighting on pages with this selector
if ($('[data-activate-highlight]').length > 0) {
hljsDefineSolidity(hljs)
hljs.initHighlightingOnLoad()
}

@ -12,7 +12,6 @@ import { updateAllCalculatedUsdValues } from '../lib/currency.js'
import { loadTokenBalanceDropdown } from '../lib/token_balance_dropdown'
const BATCH_THRESHOLD = 10
const TRANSACTION_VALIDATED_MOVE_DELAY = 1000
export const initialState = {
channelDisconnected: false,
@ -24,7 +23,6 @@ export const initialState = {
transactionCount: null,
validationCount: null,
pendingTransactions: [],
transactions: [],
internalTransactions: [],
internalTransactionsBatch: [],
@ -91,26 +89,6 @@ function baseReducer (state = initialState, action) {
})
}
}
case 'RECEIVED_NEW_PENDING_TRANSACTION': {
if (state.channelDisconnected || state.beyondPageOne) return state
if ((state.filter === 'to' && action.msg.toAddressHash !== state.addressHash) ||
(state.filter === 'from' && action.msg.fromAddressHash !== state.addressHash)) {
return state
}
return Object.assign({}, state, {
pendingTransactions: [
action.msg,
...state.pendingTransactions
]
})
}
case 'REMOVE_PENDING_TRANSACTION': {
return Object.assign({}, state, {
pendingTransactions: state.pendingTransactions.filter((transaction) => action.msg.transactionHash !== transaction.transactionHash)
})
}
case 'RECEIVED_NEW_TRANSACTION': {
if (state.channelDisconnected) return state
@ -123,7 +101,6 @@ function baseReducer (state = initialState, action) {
}
return Object.assign({}, state, {
pendingTransactions: state.pendingTransactions.map((transaction) => action.msg.transactionHash === transaction.transactionHash ? Object.assign({}, action.msg, { validated: true }) : transaction),
transactions: [
action.msg,
...state.transactions
@ -184,28 +161,6 @@ const elements = {
$el.empty().append(numeral(state.validationCount).format())
}
},
'[data-selector="pending-transactions-list"]': {
load ($el) {
return {
pendingTransactions: $el.children().map((index, el) => ({
transactionHash: el.dataset.transactionHash,
transactionHtml: el.outerHTML
})).toArray()
}
},
render ($el, state, oldState) {
if (oldState.pendingTransactions === state.pendingTransactions) return
const container = $el[0]
const newElements = _.map(state.pendingTransactions, ({ transactionHtml }) => $(transactionHtml)[0])
listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
},
'[data-selector="pending-transactions-count"]': {
render ($el, state, oldState) {
if (oldState.pendingTransactions === state.pendingTransactions) return
$el[0].innerHTML = numeral(state.pendingTransactions.filter(({ validated }) => !validated).length).format()
}
},
'[data-selector="empty-transactions-list"]': {
render ($el, state) {
if (state.transactions.length || state.loadingNextPage || state.pagingError) {
@ -226,16 +181,10 @@ const elements = {
},
render ($el, state, oldState) {
if (oldState.transactions === state.transactions) return
function updateTransactions () {
const container = $el[0]
const newElements = _.map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
if ($('[data-selector="pending-transactions-list"]').is(':visible')) {
setTimeout(updateTransactions, TRANSACTION_VALIDATED_MOVE_DELAY + 400)
} else {
updateTransactions()
}
const container = $el[0]
const newElements = _.map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
return listMorph(container, newElements, { key: 'dataset.transactionHash' })
}
},
'[data-selector="internal-transactions-list"]': {
@ -306,19 +255,11 @@ if ($addressDetailsPage.length) {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: humps.camelizeKeys(msgs)
})))
addressChannel.on('pending_transaction', (msg) => store.dispatch({
type: 'RECEIVED_NEW_PENDING_TRANSACTION',
msg: humps.camelizeKeys(msg)
}))
addressChannel.on('transaction', (msg) => {
store.dispatch({
type: 'RECEIVED_NEW_TRANSACTION',
msg: humps.camelizeKeys(msg)
})
setTimeout(() => store.dispatch({
type: 'REMOVE_PENDING_TRANSACTION',
msg: humps.camelizeKeys(msg)
}), TRANSACTION_VALIDATED_MOVE_DELAY)
})
const blocksChannel = socket.channel(`blocks:${addressHash}`, {})

@ -497,7 +497,7 @@
},
"array-equal": {
"version": "1.0.0",
"resolved": "http://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz",
"resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz",
"integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=",
"dev": true
},
@ -935,7 +935,7 @@
},
"babel-plugin-istanbul": {
"version": "4.1.6",
"resolved": "http://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz",
"resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz",
"integrity": "sha512-PWP9FQ1AhZhS01T/4qLSKoHGY/xvkZdVBGlKM/HuxxS3+sC66HhTNR7+MpbO/so/cz/wY94MeSWJuP1hXIPfwQ==",
"dev": true,
"requires": {
@ -965,7 +965,7 @@
},
"babel-plugin-syntax-object-rest-spread": {
"version": "6.13.0",
"resolved": "http://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz",
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz",
"integrity": "sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=",
"dev": true
},
@ -4737,6 +4737,16 @@
"minimalistic-assert": "^1.0.0"
}
},
"highlight.js": {
"version": "9.13.1",
"resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-9.13.1.tgz",
"integrity": "sha512-Sc28JNQNDzaH6PORtRLMvif9RSn1mYuOoX3omVjnb0+HbpPygU2ALBI0R/wsiqCb4/fcp07Gdo8g+fhtFrQl6A=="
},
"highlightjs-solidity": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/highlightjs-solidity/-/highlightjs-solidity-1.0.6.tgz",
"integrity": "sha512-NzdwI5gX+8H3z/YEXk01dKOY0QuffhNkUZw9umHUCXlzKB+1n2SexTTZpSGAmZYetHT/bccCm+3QqBULtTLmdA=="
},
"hmac-drbg": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz",
@ -5901,7 +5911,7 @@
},
"jest-get-type": {
"version": "22.4.3",
"resolved": "http://registry.npmjs.org/jest-get-type/-/jest-get-type-22.4.3.tgz",
"resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-22.4.3.tgz",
"integrity": "sha512-/jsz0Y+V29w1chdXVygEKSz2nBoHoYqNShPe+QgxSNjAuP1i8+k4LbQNrfoliKej0P45sivkSCh7yiD6ubHS3w==",
"dev": true
},
@ -6105,7 +6115,7 @@
"dependencies": {
"callsites": {
"version": "2.0.0",
"resolved": "http://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz",
"integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=",
"dev": true
}
@ -9791,6 +9801,11 @@
}
}
},
"reduce-reducers": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/reduce-reducers/-/reduce-reducers-0.4.3.tgz",
"integrity": "sha512-+CNMnI8QhgVMtAt54uQs3kUxC3Sybpa7Y63HR14uGLgI9/QR5ggHvpxwhGGe3wmx5V91YwqQIblN9k5lspAmGw=="
},
"redux": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/redux/-/redux-4.0.0.tgz",
@ -10163,7 +10178,7 @@
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
}
@ -11129,7 +11144,7 @@
},
"load-json-file": {
"version": "1.1.0",
"resolved": "http://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz",
"resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz",
"integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=",
"dev": true,
"requires": {
@ -11183,7 +11198,7 @@
},
"pify": {
"version": "2.3.0",
"resolved": "http://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
"resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
"integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=",
"dev": true
},
@ -11731,7 +11746,7 @@
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
}

@ -20,6 +20,8 @@
},
"dependencies": {
"@fortawesome/fontawesome-free": "^5.1.0-4",
"highlight.js": "^9.13.1",
"highlightjs-solidity": "^1.0.6",
"bignumber.js": "^7.2.1",
"bootstrap": "^4.1.3",
"chart.js": "^2.7.2",
@ -34,6 +36,7 @@
"phoenix": "file:../../../deps/phoenix",
"phoenix_html": "file:../../../deps/phoenix_html",
"popper.js": "^1.14.3",
"reduce-reducers": "^0.4.3",
"redux": "^4.0.0",
"urijs": "^1.19.1"
},

@ -8,7 +8,7 @@ defmodule BlockScoutWeb.AddressChannel do
alias Explorer.Chain.Hash
alias Phoenix.View
intercept(["balance_update", "count", "internal_transaction", "pending_transaction", "transaction"])
intercept(["balance_update", "count", "internal_transaction", "transaction"])
def join("addresses:" <> _address_hash, _params, socket) do
{:ok, %{}, socket}
@ -62,7 +62,6 @@ defmodule BlockScoutWeb.AddressChannel do
end
def handle_out("transaction", data, socket), do: handle_transaction(data, socket, "transaction")
def handle_out("pending_transaction", data, socket), do: handle_transaction(data, socket, "pending_transaction")
def handle_transaction(%{address: address, transaction: transaction}, socket, event) do
Gettext.put_locale(BlockScoutWeb.Gettext, socket.assigns.locale)

@ -40,8 +40,7 @@
</button>
</div>
<div class="tile tile-muted mb-4">
<pre class="pre-scrollable"><code><%= text_to_html(@address.smart_contract.contract_source_code, wrapper_tag: :span, insert_brs: false) %></code>
</pre>
<pre class="pre-scrollable line-numbers" data-activate-highlight><code class="solidity"><%= for {line, number} <- contract_lines_with_index(@address.smart_contract.contract_source_code) do %><div data-line-number="<%= number %>"><%= line %></div><% end %></code></pre>
</div>
</section>
@ -53,7 +52,7 @@
</button>
</div>
<div class="tile tile-muted mb-4">
<pre class="pre-wrap pre-scrollable"><code><%= format_smart_contract_abi(@address.smart_contract.abi) %></code>
<pre class="pre-wrap pre-scrollable"><code class="nohighlight"><%= format_smart_contract_abi(@address.smart_contract.abi) %></code>
</pre>
</div>
</section>
@ -67,7 +66,7 @@
</button>
</div>
<div class="tile tile-muted">
<pre class="pre-wrap pre-scrollable"><code><%= @address.contract_code %></code></pre>
<pre class="pre-wrap pre-scrollable"><code class="nohighlight"><%= @address.contract_code %></code></pre>
</div>
</section>

@ -7,7 +7,7 @@
<%= render BlockScoutWeb.AddressView, "_tabs.html", assigns %>
</div>
<div class="card-body" data-async-listing="<%= @current_path %>">
<div class="card-body" data-async-load data-async-listing="<%= @current_path %>">
<div data-selector="channel-batching-message" style="display: none;">
<div data-selector="reload-button" class="alert alert-info">
<a href="#" class="alert-link"><span data-selector="channel-batching-count"></span> <%= gettext "More internal transactions have come in" %></a>

@ -7,7 +7,7 @@
<%= render BlockScoutWeb.AddressView, "_tabs.html", assigns %>
</div>
<div data-async-listing="<%= @current_path %>" class="card-body">
<div data-async-load data-async-listing="<%= @current_path %>" class="card-body">
<h2 class="card-title">
<span class="text-muted"><%= gettext "Tokens" %></span> / <%= token_name(@token) %>
</h2>

@ -16,7 +16,7 @@
</div>
<!-- Token Holders -->
<div class="card-body" data-async-listing="<%= @current_path %>">
<div class="card-body" data-async-load data-async-listing="<%= @current_path %>">
<h2 class="card-title"><%= gettext "Token Holders" %></h2>
<button data-error-message class="alert alert-danger col-12 text-left" style="display: none;">

@ -15,7 +15,7 @@
<%= render OverviewView, "_tabs.html", assigns %>
</div>
<div class="card-body" data-async-listing="<%= @current_path %>">
<div class="card-body" data-async-load data-async-listing="<%= @current_path %>">
<h2 class="card-title"><%= gettext "Token Transfers" %></h2>
<button data-error-message class="alert alert-danger col-12 text-left" style="display: none;">
<span href="#" class="alert-link"><%= gettext("Something went wrong, click to reload.") %></span>

@ -14,4 +14,20 @@ defmodule BlockScoutWeb.AddressContractView do
"""
def format_optimization_text(true), do: gettext("true")
def format_optimization_text(false), do: gettext("false")
@doc """
Splits the contract source code into lines and pairs each line with its
1-based line number, left-padded with spaces to the width of the largest
line number so the numbers align in a fixed-width gutter.
"""
def contract_lines_with_index(contract_source_code) do
  lines = String.split(contract_source_code, "\n")

  # Width of the largest line number, e.g. 3 for a 100-line contract.
  pad_width =
    lines
    |> length()
    |> Integer.digits()
    |> length()

  lines
  |> Enum.with_index(1)
  |> Enum.map(fn {line_text, line_number} ->
    padded_number =
      line_number
      |> Integer.to_string()
      |> String.pad_leading(pad_width, " ")

    {line_text, padded_number}
  end)
end
end

@ -267,7 +267,7 @@ msgid "Connection Lost, click to load newer validations"
msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:50
#: lib/block_scout_web/templates/address_contract/index.html.eex:49
msgid "Contract ABI"
msgstr ""
@ -300,7 +300,7 @@ msgid "Contract Name"
msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:64
#: lib/block_scout_web/templates/address_contract/index.html.eex:63
msgid "Contract creation code"
msgstr ""
@ -324,8 +324,8 @@ msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:39
#: lib/block_scout_web/templates/address_contract/index.html.eex:52
#: lib/block_scout_web/templates/address_contract/index.html.eex:66
#: lib/block_scout_web/templates/address_contract/index.html.eex:51
#: lib/block_scout_web/templates/address_contract/index.html.eex:65
msgid "Copy Code"
msgstr ""

@ -267,7 +267,7 @@ msgid "Connection Lost, click to load newer validations"
msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:50
#: lib/block_scout_web/templates/address_contract/index.html.eex:49
msgid "Contract ABI"
msgstr ""
@ -300,7 +300,7 @@ msgid "Contract Name"
msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:64
#: lib/block_scout_web/templates/address_contract/index.html.eex:63
msgid "Contract creation code"
msgstr ""
@ -324,8 +324,8 @@ msgstr ""
#, elixir-format
#: lib/block_scout_web/templates/address_contract/index.html.eex:39
#: lib/block_scout_web/templates/address_contract/index.html.eex:52
#: lib/block_scout_web/templates/address_contract/index.html.eex:66
#: lib/block_scout_web/templates/address_contract/index.html.eex:51
#: lib/block_scout_web/templates/address_contract/index.html.eex:65
msgid "Copy Code"
msgstr ""

@ -0,0 +1,121 @@
defmodule BlockScoutWeb.Tokens.InventoryControllerTest do
use BlockScoutWeb.ConnCase
# Controller tests for the ERC-721 token inventory page; relies on ConnCase
# factories (`insert`, `build`) and router helpers (`token_inventory_path`).
describe "GET index/3" do
# A malformed address hash renders the 404 page.
test "with invalid address hash", %{conn: conn} do
conn = get(conn, token_inventory_path(conn, :index, "invalid_address"))
assert html_response(conn, 404)
end
# A well-formed address with no token record also 404s (address only built,
# never inserted).
test "with a token that doesn't exist", %{conn: conn} do
address = build(:address)
conn = get(conn, token_inventory_path(conn, :index, address.hash))
assert html_response(conn, 404)
end
test "successfully renders the page", %{conn: conn} do
token_contract_address = insert(:contract_address)
token = insert(:token, type: "ERC-721", contract_address: token_contract_address)
transaction =
:transaction
|> insert()
|> with_block()
insert(
:token_transfer,
transaction: transaction,
token_contract_address: token_contract_address,
token: token
)
conn =
get(
conn,
token_inventory_path(conn, :index, token_contract_address.hash)
)
assert html_response(conn, 200)
end
# Paging: a "token_id" param acts as the paging key, so transfers with
# token_id > 999 (1001..1050 inserted here) form the returned page.
test "returns next page of results based on last seen token balance", %{conn: conn} do
token = insert(:token, type: "ERC-721")
transaction =
:transaction
|> insert()
|> with_block()
second_page_token_balances =
Enum.map(
1..50,
&insert(
:token_transfer,
transaction: transaction,
token_contract_address: token.contract_address,
token: token,
token_id: &1 + 1000
)
)
conn =
get(conn, token_inventory_path(conn, :index, token.contract_address_hash), %{
"token_id" => "999"
})
assert Enum.map(conn.assigns.unique_tokens, & &1.token_id) == Enum.map(second_page_token_balances, & &1.token_id)
end
# 51 transfers with a default page size of 50 leaves one more page, so
# next_page_params must be populated (last rendered token_id is 1050).
test "next_page_params exists if not on last page", %{conn: conn} do
token = insert(:token, type: "ERC-721")
transaction =
:transaction
|> insert()
|> with_block()
Enum.each(
1..51,
&insert(
:token_transfer,
transaction: transaction,
token_contract_address: token.contract_address,
token: token,
token_id: &1 + 1000
)
)
# NOTE(review): these keys look swapped — "token_id" holds the contract
# address hash and "unique_token" holds the highest token id; confirm
# against the controller's next_page_params construction.
expected_next_page_params = %{
"token_id" => to_string(token.contract_address_hash),
"unique_token" => 1050
}
conn = get(conn, token_inventory_path(conn, :index, token.contract_address_hash))
assert conn.assigns.next_page_params == expected_next_page_params
end
# A single transfer fits on one page, so no paging params are assigned.
test "next_page_params are empty if on last page", %{conn: conn} do
token = insert(:token, type: "ERC-721")
transaction =
:transaction
|> insert()
|> with_block()
insert(
:token_transfer,
transaction: transaction,
token_contract_address: token.contract_address,
token: token,
token_id: 1000
)
conn = get(conn, token_inventory_path(conn, :index, token.contract_address_hash))
refute conn.assigns.next_page_params
end
end
end

@ -14,4 +14,59 @@ defmodule BlockScoutWeb.AddressContractViewTest do
assert AddressContractView.format_optimization_text(false) == "false"
end
end
# Unit tests for AddressContractView.contract_lines_with_index/1, which pairs
# each source line with a space-padded, 1-based line number.
describe "contract_lines_with_index/1" do
test "returns a list of tuples containing two strings each" do
# NOTE(review): the expected result below contains empty-string entries
# ({"", " 2"} etc.), which implies this heredoc originally contained blank
# lines that appear to have been stripped in transit — verify the fixture.
code = """
pragma solidity >=0.4.22 <0.6.0;
struct Proposal {
uint voteCount;
}
address chairperson;
mapping(address => Voter) voters;
Proposal[] proposals;
constructor(uint8 _numProposals) public {
chairperson = msg.sender;
voters[chairperson].weight = 1;
proposals.length = _numProposals;
}
"""
result = AddressContractView.contract_lines_with_index(code)
# Line numbers are left-padded to the width of the largest number
# (16 lines -> 2 characters).
assert result == [
{"pragma solidity >=0.4.22 <0.6.0;", " 1"},
{"", " 2"},
{"struct Proposal {", " 3"},
{" uint voteCount;", " 4"},
{"}", " 5"},
{"", " 6"},
{"address chairperson;", " 7"},
{"mapping(address => Voter) voters;", " 8"},
{"Proposal[] proposals;", " 9"},
{"", "10"},
{"constructor(uint8 _numProposals) public {", "11"},
{" chairperson = msg.sender;", "12"},
{" voters[chairperson].weight = 1;", "13"},
{" proposals.length = _numProposals;", "14"},
{"}", "15"},
{"", "16"}
]
end
# 100 lines -> every padded number must be exactly 3 characters wide.
test "returns a list of tuples and the second element always has n chars with x lines" do
chars = 3
lines = 100
result = AddressContractView.contract_lines_with_index(Enum.join(1..lines, "\n"))
assert Enum.all?(result, fn {_, number} -> String.length(number) == chars end)
end
# The first tuple element must be the original line, unmodified.
test "returns a list of tuples and the first element is just a line from the original string" do
result = AddressContractView.contract_lines_with_index("a\nb\nc\nd\ne")
assert Enum.map(result, fn {line, _number} -> line end) == ["a", "b", "c", "d", "e"]
end
end
end

@ -38,6 +38,13 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.cast("selfdestruct")
{:ok, :selfdestruct}
Deprecated values are not allowed for incoming data.
iex> Explorer.Chain.InternalTransaction.Type.cast(:suicide)
:error
iex> Explorer.Chain.InternalTransaction.Type.cast("suicide")
:error
Unsupported `String.t` return an `:error`.
iex> Explorer.Chain.InternalTransaction.Type.cast("hard-fork")
@ -65,6 +72,11 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.dump(:selfdestruct)
{:ok, "selfdestruct"}
Deprecated values are not allowed to be dumped to the database as old values should only be read, not written.
iex> Explorer.Chain.InternalTransaction.Type.dump(:suicide)
:error
Other atoms return an error
iex> Explorer.Chain.InternalTransaction.Type.dump(:other)
@ -91,6 +103,11 @@ defmodule Explorer.Chain.InternalTransaction.Type do
iex> Explorer.Chain.InternalTransaction.Type.load("selfdestruct")
{:ok, :selfdestruct}
Converts deprecated value on load to the corresponding `t:t/0`.
iex> Explorer.Chain.InternalTransaction.Type.load("suicide")
{:ok, :selfdestruct}
Other `t:String.t/0` return `:error`
iex> Explorer.Chain.InternalTransaction.Type.load("other")
@ -103,6 +120,8 @@ defmodule Explorer.Chain.InternalTransaction.Type do
def load("create"), do: {:ok, :create}
def load("reward"), do: {:ok, :reward}
def load("selfdestruct"), do: {:ok, :selfdestruct}
# deprecated
def load("suicide"), do: {:ok, :selfdestruct}
def load(_), do: :error
@doc """

@ -135,6 +135,10 @@ defmodule Explorer.Chain.TokenTransfer do
def page_token_transfer(query, %PagingOptions{key: nil}), do: query
def page_token_transfer(query, %PagingOptions{key: {token_id}}) do
where(query, [token_transfer], token_transfer.token_id > ^token_id)
end
def page_token_transfer(query, %PagingOptions{key: {block_number, log_index}}) do
where(
query,

@ -0,0 +1,93 @@
defmodule Explorer.ReleaseTasks do
@moduledoc """
Release tasks used to migrate or generate seeds.
"""
alias Ecto.Migrator
# OTP applications that must be running before the Repo(s) can start.
@start_apps [
:crypto,
:ssl,
:postgrex,
:ecto,
# If using Ecto 3.0 or higher
:ecto_sql
]
# Repos to migrate/seed; falls back to Explorer.Repo when the :blockscout
# app environment has no :ecto_repos configured.
@repos Application.get_env(:blockscout, :ecto_repos, [Explorer.Repo])
# Entry point for the release `migrate` command (rel/commands/migrate.sh):
# starts the minimal app set, runs all pending migrations, then halts the VM.
def migrate(_argv) do
start_services()
run_migrations()
stop_services()
end
# Entry point for the release `seed` command: migrates first so seeds run
# against an up-to-date schema, then executes each repo's seeds.exs.
def seed(_argv) do
start_services()
run_migrations()
run_seeds()
stop_services()
end
defp start_services do
IO.puts("Starting dependencies..")
# Start apps necessary for executing migrations
Enum.each(@start_apps, &Application.ensure_all_started/1)
# Start the Repo(s) for app
IO.puts("Starting repos..")
# Switch pool_size to 2 for ecto > 3.0
# NOTE(review): the comment above says to use 2 for Ecto >= 3.0, yet the call
# passes pool_size: 1 — confirm which Ecto version this release targets.
Enum.each(@repos, & &1.start_link(pool_size: 1))
end
# Prints a success message and stops the VM; release task processes must halt
# explicitly or the node would keep running after the task completes.
defp stop_services do
IO.puts("Success!")
:init.stop()
end
defp run_migrations do
Enum.each(@repos, &run_migrations_for/1)
end
# Runs every pending "up" migration found under priv/<repo>/migrations.
defp run_migrations_for(repo) do
app = Keyword.get(repo.config, :otp_app)
IO.puts("Running migrations for #{app}")
migrations_path = priv_path_for(repo, "migrations")
Migrator.run(repo, migrations_path, :up, all: true)
end
defp run_seeds do
Enum.each(@repos, &run_seeds_for/1)
end
# sobelow_skip ["RCE.CodeModule"]
defp run_seeds_for(repo) do
# Run the seed script if it exists
seed_script = priv_path_for(repo, "seeds.exs")
if File.exists?(seed_script) do
IO.puts("Running seed script..")
# Code.eval_file executes arbitrary Elixir; acceptable here only because
# seeds.exs ships inside the release itself (hence the sobelow skip).
Code.eval_file(seed_script)
end
end
# Builds priv/<underscored repo name>/<filename> for the repo's owning
# OTP application (e.g. priv/repo/migrations for Explorer.Repo).
defp priv_path_for(repo, filename) do
app = Keyword.get(repo.config, :otp_app)
repo_underscore =
repo
|> Module.split()
|> List.last()
|> Macro.underscore()
priv_dir = "#{:code.priv_dir(app)}"
Path.join([priv_dir, repo_underscore, filename])
end
end

@ -1,28 +1,31 @@
defmodule Explorer.Repo.Migrations.EIP6 do
@moduledoc """
Use `priv/repo/migrations/scripts/20181107164103_eip6.sql` to migrate data and validate constraint.
```sh
mix ecto.migrate
psql -d $DATABASE -a -f priv/repo/migrations/scripts/20181107164103_eip6.sql
```
"""
use Ecto.Migration
def up do
execute("ALTER TABLE internal_transactions DROP CONSTRAINT suicide_has_from_and_to_address_hashes")
execute("UPDATE internal_transactions SET type = 'selfdestruct' WHERE type = 'suicide'")
create(
constraint(
:internal_transactions,
:selfdestruct_has_from_and_to_address_hashes,
check: """
type != 'selfdestruct' OR
(from_address_hash IS NOT NULL AND gas IS NULL AND to_address_hash IS NOT NULL)
"""
)
)
# `NOT VALID` skips checking pre-existing rows. Use `priv/repo/migrations/scripts/20181107164103_eip6.sql` to
# migrate data and validate constraints
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT selfdestruct_has_from_and_to_address
CHECK (type != 'selfdestruct' OR (from_address_hash IS NOT NULL AND gas IS NULL AND to_address_hash IS NOT NULL))
NOT VALID
""")
end
def down do
execute("ALTER TABLE internal_transactions DROP CONSTRAINT selfdestruct_has_from_and_to_address_hashes")
execute("UPDATE internal_transactions SET type = 'suicide' WHERE type = 'selfdestruct'")
create(
constraint(
:internal_transactions,

@ -1,10 +1,37 @@
defmodule Explorer.Repo.Migrations.AdditionalInternalTransactionConstraints do
@moduledoc """
Use `priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql` to migrate data and
validate constraint.
```sh
mix ecto.migrate
psql -d $DATABASE -a -f priv/repo/migrations/scripts/20181108205650_additional_internal_transaction_constraints.sql
```
"""
use Ecto.Migration
def up do
create(constraint(:internal_transactions, :call_has_call_type, check: "type != 'call' OR call_type IS NOT NULL"))
create(constraint(:internal_transactions, :call_has_input, check: "type != 'call' OR input IS NOT NULL"))
create(constraint(:internal_transactions, :create_has_init, check: "type != 'create' OR init IS NOT NULL"))
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT call_has_call_type
CHECK (type != 'call' OR call_type IS NOT NULL)
NOT VALID
""")
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT call_has_input
CHECK (type != 'call' OR input IS NOT NULL)
NOT VALID
""")
execute("""
ALTER TABLE internal_transactions
ADD CONSTRAINT create_has_init
CHECK (type != 'create' OR init IS NOT NULL)
NOT VALID
""")
end
def down do

@ -0,0 +1,64 @@
-- Batched backfill: renames deprecated internal_transactions.type = 'suicide'
-- rows to 'selfdestruct', then validates the NOT VALID constraint added by the
-- companion migration.
DO $$
DECLARE
  batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
  -- Highest row_number already handed to a batch. Starts at 0 (the original
  -- started at batch_size, which made the WHILE condition false immediately
  -- for tables with fewer than batch_size rows, so nothing was migrated and
  -- the final VALIDATE CONSTRAINT failed).
  iterator integer := 0;
  max_row_number integer;
  next_iterator integer;
  updated_row_count integer;
  deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS current_suicide_internal_transactions_temp;

  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE current_suicide_internal_transactions_temp(
    transaction_hash bytea NOT NULL,
    index bigint NOT NULL,
    row_number integer
  );

  INSERT INTO current_suicide_internal_transactions_temp
  SELECT DISTINCT ON (transaction_hash, index)
    transaction_hash,
    index,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE type = 'suicide'
  ORDER BY transaction_hash, index DESC;

  -- ROW_NUMBER() starts at 1, so MAX(row_number) is the exact row count;
  -- COALESCE covers the case where no 'suicide' rows exist at all.
  max_row_number := COALESCE((SELECT MAX(row_number) FROM current_suicide_internal_transactions_temp), 0);
  RAISE NOTICE '% items to be updated', max_row_number;

  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;

    RAISE NOTICE '-> suicide internal transactions % to % to be updated', iterator + 1, next_iterator - 1;

    UPDATE internal_transactions
    SET type = 'selfdestruct'
    FROM current_suicide_internal_transactions_temp
    WHERE internal_transactions.transaction_hash = current_suicide_internal_transactions_temp.transaction_hash AND
          internal_transactions.index = current_suicide_internal_transactions_temp.index AND
          current_suicide_internal_transactions_temp.row_number < next_iterator;

    GET DIAGNOSTICS updated_row_count = ROW_COUNT;

    RAISE NOTICE '-> % internal transactions updated from suicide to selfdestruct', updated_row_count;

    DELETE FROM current_suicide_internal_transactions_temp
    WHERE row_number < next_iterator;

    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;

    -- Every row updated must also have been removed from the temp table.
    ASSERT updated_row_count = deleted_row_count;

    -- Forces a WAL checkpoint between batches. NOTE(review): this does NOT
    -- commit anything — a DO block runs inside a single transaction.
    CHECKPOINT;

    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;

  RAISE NOTICE 'All suicide type internal transactions updated to selfdestruct. Validating constraint.';

  ALTER TABLE internal_transactions VALIDATE CONSTRAINT selfdestruct_has_from_and_to_address;
END $$;

@ -0,0 +1,80 @@
-- Batched cleanup: for every transaction owning an internal transaction that
-- violates the new call/create constraints, clears the fetch markers (so the
-- indexer refetches) and deletes the offending internal transactions, then
-- validates the NOT VALID constraints added by the companion migration.
DO $$
DECLARE
  batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
  -- Highest row_number already handed to a batch. Starts at 0 (the original
  -- started at batch_size, which skipped the loop entirely when fewer than
  -- batch_size transactions were affected, leaving the final
  -- VALIDATE CONSTRAINT calls to fail).
  iterator integer := 0;
  max_row_number integer;
  next_iterator integer;
  updated_transaction_count integer;
  deleted_internal_transaction_count integer;
  deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS transactions_with_deprecated_internal_transactions;

  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE transactions_with_deprecated_internal_transactions(
    hash bytea NOT NULL,
    row_number integer
  );

  INSERT INTO transactions_with_deprecated_internal_transactions
  SELECT DISTINCT ON (transaction_hash)
    transaction_hash,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE
    -- call_has_call_type CONSTRAINT
    (type = 'call' AND call_type IS NULL) OR
    -- call_has_input CONSTRAINT
    (type = 'call' AND input IS NULL) OR
    -- create_has_init CONSTRAINT
    (type = 'create' AND init IS NULL)
  ORDER BY transaction_hash DESC;

  -- ROW_NUMBER() starts at 1, so MAX(row_number) is the exact transaction
  -- count; COALESCE covers the case where nothing needs refetching.
  max_row_number := COALESCE((SELECT MAX(row_number) FROM transactions_with_deprecated_internal_transactions), 0);
  RAISE NOTICE '% transactions to be updated', max_row_number;

  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;

    RAISE NOTICE '-> transactions with deprecated internal transactions % to % to be updated', iterator + 1, next_iterator - 1;

    -- Clearing internal_transactions_indexed_at / error marks the
    -- transaction for internal-transaction refetch by the indexer.
    UPDATE transactions
    SET internal_transactions_indexed_at = NULL,
        error = NULL
    FROM transactions_with_deprecated_internal_transactions
    WHERE transactions.hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;

    GET DIAGNOSTICS updated_transaction_count = ROW_COUNT;

    RAISE NOTICE '-> % transactions updated to refetch internal transactions', updated_transaction_count;

    DELETE FROM internal_transactions
    USING transactions_with_deprecated_internal_transactions
    WHERE internal_transactions.transaction_hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;

    GET DIAGNOSTICS deleted_internal_transaction_count = ROW_COUNT;

    RAISE NOTICE '-> % internal transactions deleted', deleted_internal_transaction_count;

    DELETE FROM transactions_with_deprecated_internal_transactions
    WHERE row_number < next_iterator;

    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;

    -- Every transaction updated must also have left the temp table.
    ASSERT updated_transaction_count = deleted_row_count;

    -- Forces a WAL checkpoint between batches. NOTE(review): this does NOT
    -- commit anything — a DO block runs inside a single transaction.
    CHECKPOINT;

    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;

  RAISE NOTICE 'All deprecated internal transactions will be refetched. Validating constraints.';

  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_call_type;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_input;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT create_has_init;
END $$;

@ -3131,7 +3131,7 @@ defmodule Explorer.ChainTest do
token_id: 29
)
paging_options = %PagingOptions{key: {first_page.block_number, first_page.log_index}, page_size: 1}
paging_options = %PagingOptions{key: {first_page.token_id}, page_size: 1}
unique_tokens_ids_paginated =
token_contract_address.hash

@ -63,7 +63,9 @@ defmodule BlockScout.Mixfile do
# Documentation
{:ex_doc, "~> 0.19.0", only: [:dev]},
# Code coverage
{:excoveralls, "~> 0.10.0", only: [:test], github: "KronicDeth/excoveralls", branch: "circle-workflows"}
{:excoveralls, "~> 0.10.0", only: [:test], github: "KronicDeth/excoveralls", branch: "circle-workflows"},
# Release
{:distillery, "~> 2.0", runtime: false}
]
end
end

@ -6,6 +6,7 @@
"absinthe_plug": {:hex, :absinthe_plug, "1.4.6", "ac5d2d3d02acf52fda0f151b294017ab06e2ed1c6c15334e06aac82c94e36e08", [:mix], [{:absinthe, "~> 1.4.11", [hex: :absinthe, repo: "hexpm", optional: false]}, {:plug, "~> 1.3.2 or ~> 1.4", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"absinthe_relay": {:hex, :absinthe_relay, "1.4.4", "d0a6d8e71375a6026974d227456c8a73ea8eea7c7b00e698603ab5a96066c333", [:mix], [{:absinthe, "~> 1.4.0", [hex: :absinthe, repo: "hexpm", optional: false]}, {:ecto, "~> 2.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
"accept": {:hex, :accept, "0.3.3", "548ebb6fb2e8b0d170e75bb6123aea6ceecb0189bb1231eeadf52eac08384a97", [:rebar3], [], "hexpm"},
"artificery": {:hex, :artificery, "0.2.6", "f602909757263f7897130cbd006b0e40514a541b148d366ad65b89236b93497a", [:mix], [], "hexpm"},
"bcrypt_elixir": {:hex, :bcrypt_elixir, "1.1.1", "6b5560e47a02196ce5f0ab3f1d8265db79a23868c137e973b27afef928ed8006", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm"},
"benchee": {:hex, :benchee, "0.13.2", "30cd4ff5f593fdd218a9b26f3c24d580274f297d88ad43383afe525b1543b165", [:mix], [{:deep_merge, "~> 0.1", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm"},
"benchee_csv": {:hex, :benchee_csv, "0.8.0", "0ca094677d6e2b2f601b7ee7864b754789ef9d24d079432e5e3d6f4fb83a4d80", [:mix], [{:benchee, "~> 0.12", [hex: :benchee, optional: false]}, {:csv, "~> 2.0", [hex: :csv, optional: false]}]},
@ -25,6 +26,7 @@
"decimal": {:hex, :decimal, "1.5.0", "b0433a36d0e2430e3d50291b1c65f53c37d56f83665b43d79963684865beab68", [:mix], []},
"deep_merge": {:hex, :deep_merge, "0.2.0", "c1050fa2edf4848b9f556fba1b75afc66608a4219659e3311d9c9427b5b680b3", [:mix], [], "hexpm"},
"dialyxir": {:hex, :dialyxir, "0.5.1", "b331b091720fd93e878137add264bac4f644e1ddae07a70bf7062c7862c4b952", [:mix], []},
"distillery": {:hex, :distillery, "2.0.12", "6e78fe042df82610ac3fa50bd7d2d8190ad287d120d3cd1682d83a44e8b34dfb", [:mix], [{:artificery, "~> 0.2", [hex: :artificery, repo: "hexpm", optional: false]}], "hexpm"},
"earmark": {:hex, :earmark, "1.2.6", "b6da42b3831458d3ecc57314dff3051b080b9b2be88c2e5aa41cd642a5b044ed", [:mix], [], "hexpm"},
"ecto": {:hex, :ecto, "2.2.11", "4bb8f11718b72ba97a2696f65d247a379e739a0ecabf6a13ad1face79844791c", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: true]}, {:decimal, "~> 1.2", [hex: :decimal, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.8.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.13.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"},
"elixir_make": {:hex, :elixir_make, "0.4.2", "332c649d08c18bc1ecc73b1befc68c647136de4f340b548844efc796405743bf", [:mix], [], "hexpm"},

@ -0,0 +1,3 @@
#!/bin/sh
# Distillery custom command (`bin/blockscout migrate`): evaluates
# Explorer.ReleaseTasks.migrate/1 inside the release, forwarding any extra
# CLI arguments via --argv.
release_ctl eval --mfa "Explorer.ReleaseTasks.migrate/1" --argv -- "$@"

@ -0,0 +1,3 @@
#!/bin/sh
# Distillery custom command (`bin/blockscout seed`): evaluates
# Explorer.ReleaseTasks.seed/1 inside the release (which migrates first,
# then runs each repo's seeds.exs), forwarding extra CLI arguments via --argv.
release_ctl eval --mfa "Explorer.ReleaseTasks.seed/1" --argv -- "$@"

@ -0,0 +1,85 @@
# Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
~w(rel plugins *.exs)
|> Path.join()
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))

# Immediately invokes the given zero-arity function; used to express work
# performed at config-evaluation time.
defer = fn fun ->
  apply(fun, [])
end

# Resolves the umbrella root regardless of whether this config is evaluated
# from the umbrella root or from within one of the child apps.
app_root = fn ->
  if String.contains?(System.cwd!(), "apps") do
    Path.join([System.cwd!(), "/../../"])
  else
    System.cwd!()
  end
end

# Generates a random distributed-Erlang cookie for prod releases and writes it
# to `.erlang_cookie` at the umbrella root. NOTE: the file is overwritten on
# every evaluation, so each build produces a fresh cookie.
cookie =
  defer.(fn ->
    cookie_bytes =
      :crypto.strong_rand_bytes(32)
      |> Base.encode32()

    :ok = File.write!(Path.join(app_root.(), ".erlang_cookie"), cookie_bytes)
    :erlang.binary_to_atom(cookie_bytes, :utf8)
  end)

use Mix.Releases.Config,
  # This sets the default release built by `mix release`
  default_release: :default,
  # This sets the default environment used by `mix release`
  default_environment: Mix.env()

# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/config/distillery.html

# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile

environment :dev do
  # If you are running Phoenix, you should make sure that
  # server: true is set and the code reloader is disabled,
  # even in dev mode.
  # It is recommended that you build with MIX_ENV=prod and pass
  # the --env flag to Distillery explicitly if you want to use
  # dev mode.
  set dev_mode: true
  set include_erts: false
  set cookie: :"i6E,!mJ6|E&|.VPaDywo@N.o}BgmC$UdKXW[aK,(@U0Asfpp/NergA;CR%YW4;i6"
end

environment :prod do
  set include_erts: true
  set include_src: false
  set cookie: cookie
  set vm_args: "rel/vm.args"
end

# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :blockscout do
  set version: "1.2.0-beta"

  set applications: [
    :runtime_tools,
    block_scout_web: :permanent,
    ethereum_jsonrpc: :permanent,
    explorer: :permanent,
    indexer: :permanent
  ]

  # Custom commands exposed as `bin/blockscout migrate` / `bin/blockscout seed`.
  # FIX: removed the trailing comma after the last keyword entry — Elixir does
  # not allow trailing commas in lists, so this file previously failed to parse.
  set commands: [
    migrate: "rel/commands/migrate.sh",
    seed: "rel/commands/seed.sh"
  ]
end

@ -0,0 +1,3 @@
*.*
!*.exs
!.gitignore

@ -0,0 +1,30 @@
## This file provides the arguments passed to the VM at startup
## You can find a full list of flags and their behaviours at
## http://erlang.org/doc/man/erl.html
## Name of the node
-name <%= release_name %>@127.0.0.1
## Cookie for distributed erlang
-setcookie <%= release.profile.cookie %>
## Heartbeat management; auto-restarts VM if it dies or becomes unresponsive
## (Disabled by default..use with caution!)
##-heart
## Enable kernel poll and a few async threads
##+K true
##+A 5
## For OTP21+, the +A flag is not used anymore,
## +SDio replace it to use dirty schedulers
##+SDio 5
## Increase number of concurrent ports/sockets
##-env ERL_MAX_PORTS 4096
## Tweak GC to run more often
##-env ERL_FULLSWEEP_AFTER 10
# Enable SMP automatically based on availability
# On OTP21+, this is not needed anymore.
-smp auto
Loading…
Cancel
Save