commit 4ec8ba3bec

@@ -0,0 +1,80 @@
import { asyncReducer, asyncInitialState } from '../../js/lib/async_listing_load'

describe('ELEMENTS_LOAD', () => {
  test('sets only nextPagePath and ignores other keys', () => {
    const state = Object.assign({}, asyncInitialState)
    const action = { type: 'ELEMENTS_LOAD', nextPagePath: 'set', foo: 1 }
    const output = asyncReducer(state, action)

    expect(output.foo).not.toEqual(1)
    expect(output.nextPagePath).toEqual('set')
  })
})

describe('ADD_ITEM_KEY', () => {
  test('sets itemKey to what was passed in the action', () => {
    const expectedItemKey = 'expected.Key'

    const state = Object.assign({}, asyncInitialState)
    const action = { type: 'ADD_ITEM_KEY', itemKey: expectedItemKey }
    const output = asyncReducer(state, action)

    expect(output.itemKey).toEqual(expectedItemKey)
  })
})

describe('START_REQUEST', () => {
  test('sets loading status to true', () => {
    const state = Object.assign({}, asyncInitialState, { loading: false })
    const action = { type: 'START_REQUEST' }
    const output = asyncReducer(state, action)

    expect(output.loading).toEqual(true)
  })
})

describe('REQUEST_ERROR', () => {
  test('sets requestError to true', () => {
    const state = Object.assign({}, asyncInitialState, { requestError: false })
    const action = { type: 'REQUEST_ERROR' }
    const output = asyncReducer(state, action)

    expect(output.requestError).toEqual(true)
  })
})

describe('FINISH_REQUEST', () => {
  test('sets loading status to false', () => {
    const state = Object.assign({}, asyncInitialState, {
      loading: true,
      loadingFirstPage: true
    })
    const action = { type: 'FINISH_REQUEST' }
    const output = asyncReducer(state, action)

    expect(output.loading).toEqual(false)
    expect(output.loadingFirstPage).toEqual(false)
  })
})

describe('ITEMS_FETCHED', () => {
  test('sets the items to what was passed in the action', () => {
    const expectedItems = [1, 2, 3]

    const state = Object.assign({}, asyncInitialState)
    const action = { type: 'ITEMS_FETCHED', items: expectedItems }
    const output = asyncReducer(state, action)

    expect(output.items).toEqual(expectedItems)
  })
})

describe('NAVIGATE_TO_OLDER', () => {
  test('sets beyondPageOne to true', () => {
    const state = Object.assign({}, asyncInitialState, { beyondPageOne: false })
    const action = { type: 'NAVIGATE_TO_OLDER' }
    const output = asyncReducer(state, action)

    expect(output.beyondPageOne).toEqual(true)
  })
})
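
One branch the suite above does not exercise is the reducer's default case. A minimal extra test, assuming the same imports as above, could look like this (the action type below is made up; any type the reducer does not handle works):

describe('unknown action', () => {
  test('returns the state unchanged', () => {
    const state = Object.assign({}, asyncInitialState, { loading: true })
    const action = { type: 'SOME_UNHANDLED_ACTION' }

    expect(asyncReducer(state, action)).toEqual(state)
  })
})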
@@ -0,0 +1,19 @@
// replace the default background color from highlightjs
.hljs {
  background: $gray-100;
}

.line-numbers {

  [data-line-number] {

    &:before {
      content: attr(data-line-number);
      display: inline-block;
      border-right: 1px solid $gray-400;
      padding: 0 .5em;
      margin-right: .5em;
      color: $gray-600;
    }
  }
}
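
The rules above only render the numbers: they assume each line inside a .line-numbers block carries a data-line-number attribute, and nothing in this commit produces that markup. A hypothetical jQuery helper that stamps the attributes (naive, in that it assumes the highlighted markup never spans lines) might look like:

import $ from 'jquery'

// wrap every line of a highlighted <code> block in a span carrying
// the data-line-number attribute the SCSS above renders via :before
function addLineNumbers (code) {
  const numbered = $(code)
    .html()
    .split('\n')
    .map((line, index) => `<span data-line-number="${index + 1}">${line}</span>`)
    .join('\n')

  $(code).html(numbered).closest('pre').addClass('line-numbers')
}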
@@ -1,85 +1,223 @@
import $ from 'jquery'
import _ from 'lodash'
import URI from 'urijs'
import humps from 'humps'
import listMorph from '../lib/list_morph'
import reduceReducers from 'reduce-reducers'
import { createStore, connectElements } from '../lib/redux_helpers.js'

/**
 * This is a generic lib to add pagination with asynchronous page loading. There are two ways of
 * activating it in a page.
 *
 * If the page has no redux store associated with it, all you need is markup with the following
 * pattern:
 *
 * <div data-async-load data-async-listing="firstLoadPath">
 *   <div data-loading-message> message </div>
 *   <div data-empty-response-message style="display: none;"> message </div>
 *   <div data-error-message style="display: none;"> message </div>
 *   <div data-items></div>
 *   <a data-next-page-button style="display: none;"> button text </a>
 *   <div data-loading-button style="display: none;"> loading text </div>
 * </div>
 *
 * The data-async-load attribute is responsible for binding the store.
 *
 * If the page already has a redux store associated with it, you need to connect the reducers
 * instead of creating the store with `createStore`. For instance:
 *
 * // my_page.js
 * const initialState = { ... }
 * const reducer = (state, action) => { ... }
 * const store = createAsyncLoadStore(reducer, initialState, 'item.Key')
 *
 * <div data-async-listing="path">
 *   <div data-loading-message> message </div>
 *   <div data-empty-response-message style="display: none;"> message </div>
 *   <div data-error-message style="display: none;"> message </div>
 *   <div data-items></div>
 *   <a data-next-page-button style="display: none;"> button text </a>
 *   <div data-loading-button style="display: none;"> loading text </div>
 * </div>
 *
 * The createAsyncLoadStore function returns a store with asynchronous loading activated. This
 * approach expects the same markup as above, except for the data-async-load attribute, which is
 * used to create the store automatically and is not necessary in this case.
 */

export const asyncInitialState = {
  /* it will consider any query param in the current URI as paging */
  beyondPageOne: (URI(window.location).query() !== ''),
  /* an array with every html element of the list being shown */
  items: [],
  /* the key for diffing the elements in the items array */
  itemKey: null,
  /* represents whether a request is happening or not */
  loading: false,
  /* if there was an error fetching items */
  requestError: false,
  /* if it is loading the first page */
  loadingFirstPage: true,
  /* link to the next page */
  nextPagePath: null
}

export function asyncReducer (state = asyncInitialState, action) {
  switch (action.type) {
    case 'ELEMENTS_LOAD': {
      return Object.assign({}, state, { nextPagePath: action.nextPagePath })
    }
    case 'ADD_ITEM_KEY': {
      return Object.assign({}, state, { itemKey: action.itemKey })
    }
    case 'START_REQUEST': {
      return Object.assign({}, state, {
        loading: true,
        requestError: false
      })
    }
    case 'REQUEST_ERROR': {
      return Object.assign({}, state, { requestError: true })
    }
    case 'FINISH_REQUEST': {
      return Object.assign({}, state, {
        loading: false,
        loadingFirstPage: false
      })
    }
    case 'ITEMS_FETCHED': {
      return Object.assign({}, state, {
        requestError: false,
        items: action.items,
        nextPagePath: action.nextPagePath
      })
    }
    case 'NAVIGATE_TO_OLDER': {
      history.replaceState({}, null, state.nextPagePath)

      return Object.assign({}, state, { beyondPageOne: true })
    }
    default:
      return state
  }
}

export const elements = {
  '[data-async-listing]': {
    load ($el) {
      const nextPagePath = $el.data('async-listing')

      return { nextPagePath }
    }
  },
  '[data-async-listing] [data-loading-message]': {
    render ($el, state) {
      if (state.loadingFirstPage) return $el.show()

      $el.hide()
    }
  },
  '[data-async-listing] [data-empty-response-message]': {
    render ($el, state) {
      if (
        !state.requestError &&
        (!state.loading || !state.loadingFirstPage) &&
        state.items.length === 0
      ) {
        return $el.show()
      }

      $el.hide()
    }
  },
  '[data-async-listing] [data-error-message]': {
    render ($el, state) {
      if (state.requestError) return $el.show()

      $el.hide()
    }
  },
  '[data-async-listing] [data-items]': {
    render ($el, state, oldState) {
      if (state.items === oldState.items) return

      if (state.itemKey) {
        const container = $el[0]
        const newElements = _.map(state.items, (item) => $(item)[0])
        listMorph(container, newElements, { key: state.itemKey })
        return
      }

      $el.html(state.items)
    }
  },
  '[data-async-listing] [data-next-page-button]': {
    render ($el, state) {
      if (state.requestError) return $el.hide()
      if (!state.nextPagePath) return $el.hide()
      if (state.loading) return $el.hide()

      $el.show()
      $el.attr('href', state.nextPagePath)
    }
  },
  '[data-async-listing] [data-loading-button]': {
    render ($el, state) {
      if (!state.loadingFirstPage && state.loading) return $el.show()

      $el.hide()
    }
  }
}

/**
 * Create a store combining the given reducer and initial state with the async reducer.
 *
 * reducer: The reducer that will be merged with the asyncReducer to add async
 * loading capabilities to a page. Any state changes in the given reducer are
 * applied AFTER the asyncReducer.
 *
 * initialState: The initial state to be merged with the async state. Any state
 * values passed here overwrite the values in asyncInitialState.
 *
 * itemKey: Added to the state as the key for diffing the elements and adding or
 * removing them with the correct animation. Check list_morph.js for more information.
 */
export function createAsyncLoadStore (reducer, initialState, itemKey) {
  const state = _.merge({}, asyncInitialState, initialState)
  const store = createStore(reduceReducers(asyncReducer, reducer, state))

  if (typeof itemKey !== 'undefined') {
    store.dispatch({
      type: 'ADD_ITEM_KEY',
      itemKey
    })
  }

  connectElements({store, elements})
  firstPageLoad(store)
  return store
}

function firstPageLoad (store) {
  const $element = $('[data-async-listing]')
  function loadItems () {
    const path = store.getState().nextPagePath
    store.dispatch({type: 'START_REQUEST'})
    $.getJSON(path, {type: 'JSON'})
      .done(response => store.dispatch(Object.assign({type: 'ITEMS_FETCHED'}, humps.camelizeKeys(response))))
      .fail(() => store.dispatch({type: 'REQUEST_ERROR'}))
      .always(() => store.dispatch({type: 'FINISH_REQUEST'}))
  }
  loadItems()

  $element.on('click', '[data-error-message]', (event) => {
    event.preventDefault()
    loadItems()
  })

  $element.on('click', '[data-next-page-button]', (event) => {
    event.preventDefault()
    loadItems()
    store.dispatch({type: 'NAVIGATE_TO_OLDER'})
  })
}

const $element = $('[data-async-load]')
if ($element.length) {
  const store = createStore(asyncReducer)
  connectElements({store, elements})
  firstPageLoad(store)
}
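As a usage sketch for the second activation path described in the doc comment above, a page module could compose its own reducer with the async one. The file name, the itemCount field and the 'dataset.key' item key are all illustrative, not part of this commit:

// my_page.js (hypothetical)
import { createAsyncLoadStore } from '../lib/async_listing_load'

const initialState = {
  itemCount: 0
}

// runs AFTER asyncReducer, so the async fields are already updated
function reducer (state = initialState, action) {
  switch (action.type) {
    case 'ITEMS_FETCHED': {
      return Object.assign({}, state, { itemCount: action.items.length })
    }
    default:
      return state
  }
}

const store = createAsyncLoadStore(reducer, initialState, 'dataset.key')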
@@ -0,0 +1,9 @@
import $ from 'jquery'
import hljs from 'highlight.js'
import hljsDefineSolidity from 'highlightjs-solidity'

// only activate highlighting on pages with this selector
if ($('[data-activate-highlight]').length > 0) {
  hljsDefineSolidity(hljs)
  hljs.initHighlightingOnLoad()
}
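
initHighlightingOnLoad only scans the document once, when the page loads. For code injected later (for instance by the async listing lib above), a follow-up pass could use highlight.js's per-block API; highlightBlock is assumed here from the 9.x line this commit appears to target:

import $ from 'jquery'
import hljs from 'highlight.js'

// highlight <pre><code> blocks that were added after the initial page load
function highlightDynamicBlocks (container) {
  $(container).find('pre code').each((_i, block) => hljs.highlightBlock(block))
}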
@@ -0,0 +1,121 @@
defmodule BlockScoutWeb.Tokens.InventoryControllerTest do
  use BlockScoutWeb.ConnCase

  describe "GET index/3" do
    test "with invalid address hash", %{conn: conn} do
      conn = get(conn, token_inventory_path(conn, :index, "invalid_address"))

      assert html_response(conn, 404)
    end

    test "with a token that doesn't exist", %{conn: conn} do
      address = build(:address)
      conn = get(conn, token_inventory_path(conn, :index, address.hash))

      assert html_response(conn, 404)
    end

    test "successfully renders the page", %{conn: conn} do
      token_contract_address = insert(:contract_address)
      token = insert(:token, type: "ERC-721", contract_address: token_contract_address)

      transaction =
        :transaction
        |> insert()
        |> with_block()

      insert(
        :token_transfer,
        transaction: transaction,
        token_contract_address: token_contract_address,
        token: token
      )

      conn =
        get(
          conn,
          token_inventory_path(conn, :index, token_contract_address.hash)
        )

      assert html_response(conn, 200)
    end

    test "returns next page of results based on last seen token balance", %{conn: conn} do
      token = insert(:token, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block()

      second_page_token_balances =
        Enum.map(
          1..50,
          &insert(
            :token_transfer,
            transaction: transaction,
            token_contract_address: token.contract_address,
            token: token,
            token_id: &1 + 1000
          )
        )

      conn =
        get(conn, token_inventory_path(conn, :index, token.contract_address_hash), %{
          "token_id" => "999"
        })

      assert Enum.map(conn.assigns.unique_tokens, & &1.token_id) == Enum.map(second_page_token_balances, & &1.token_id)
    end

    test "next_page_params exist if not on last page", %{conn: conn} do
      token = insert(:token, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block()

      Enum.each(
        1..51,
        &insert(
          :token_transfer,
          transaction: transaction,
          token_contract_address: token.contract_address,
          token: token,
          token_id: &1 + 1000
        )
      )

      expected_next_page_params = %{
        "token_id" => to_string(token.contract_address_hash),
        "unique_token" => 1050
      }

      conn = get(conn, token_inventory_path(conn, :index, token.contract_address_hash))

      assert conn.assigns.next_page_params == expected_next_page_params
    end

    test "next_page_params are empty if on last page", %{conn: conn} do
      token = insert(:token, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block()

      insert(
        :token_transfer,
        transaction: transaction,
        token_contract_address: token.contract_address,
        token: token,
        token_id: 1000
      )

      conn = get(conn, token_inventory_path(conn, :index, token.contract_address_hash))

      refute conn.assigns.next_page_params
    end
  end
end
@@ -0,0 +1,93 @@
defmodule Explorer.ReleaseTasks do
  @moduledoc """
  Release tasks used to migrate or generate seeds.
  """

  alias Ecto.Migrator

  @start_apps [
    :crypto,
    :ssl,
    :postgrex,
    :ecto,
    # If using Ecto 3.0 or higher
    :ecto_sql
  ]

  @repos Application.get_env(:blockscout, :ecto_repos, [Explorer.Repo])

  def migrate(_argv) do
    start_services()

    run_migrations()

    stop_services()
  end

  def seed(_argv) do
    start_services()

    run_migrations()

    run_seeds()

    stop_services()
  end

  defp start_services do
    IO.puts("Starting dependencies..")
    # Start apps necessary for executing migrations
    Enum.each(@start_apps, &Application.ensure_all_started/1)

    # Start the Repo(s) for app
    IO.puts("Starting repos..")

    # Switch pool_size to 2 for ecto > 3.0
    Enum.each(@repos, & &1.start_link(pool_size: 1))
  end

  defp stop_services do
    IO.puts("Success!")
    :init.stop()
  end

  defp run_migrations do
    Enum.each(@repos, &run_migrations_for/1)
  end

  defp run_migrations_for(repo) do
    app = Keyword.get(repo.config, :otp_app)
    IO.puts("Running migrations for #{app}")
    migrations_path = priv_path_for(repo, "migrations")
    Migrator.run(repo, migrations_path, :up, all: true)
  end

  defp run_seeds do
    Enum.each(@repos, &run_seeds_for/1)
  end

  # sobelow_skip ["RCE.CodeModule"]
  defp run_seeds_for(repo) do
    # Run the seed script if it exists
    seed_script = priv_path_for(repo, "seeds.exs")

    if File.exists?(seed_script) do
      IO.puts("Running seed script..")
      Code.eval_file(seed_script)
    end
  end

  defp priv_path_for(repo, filename) do
    app = Keyword.get(repo.config, :otp_app)

    repo_underscore =
      repo
      |> Module.split()
      |> List.last()
      |> Macro.underscore()

    priv_dir = "#{:code.priv_dir(app)}"

    Path.join([priv_dir, repo_underscore, filename])
  end
end
@@ -0,0 +1,64 @@
DO $$
DECLARE
  row_count integer := 1;
  batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
  iterator integer := 0;
  max_row_number integer;
  next_iterator integer;
  updated_row_count integer;
  deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS current_suicide_internal_transactions_temp;
  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE current_suicide_internal_transactions_temp(
    transaction_hash bytea NOT NULL,
    index bigint NOT NULL,
    row_number integer
  );
  INSERT INTO current_suicide_internal_transactions_temp
  SELECT DISTINCT ON (transaction_hash, index)
    transaction_hash,
    index,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE type = 'suicide'
  ORDER BY transaction_hash, index DESC;

  max_row_number := (SELECT MAX(row_number) FROM current_suicide_internal_transactions_temp);
  RAISE NOTICE '% items to be updated', max_row_number;

  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;

    RAISE NOTICE '-> suicide internal transactions % to % to be updated', iterator, next_iterator - 1;

    UPDATE internal_transactions
    SET type = 'selfdestruct'
    FROM current_suicide_internal_transactions_temp
    WHERE internal_transactions.transaction_hash = current_suicide_internal_transactions_temp.transaction_hash AND
          internal_transactions.index = current_suicide_internal_transactions_temp.index AND
          current_suicide_internal_transactions_temp.row_number < next_iterator;

    GET DIAGNOSTICS updated_row_count = ROW_COUNT;

    RAISE NOTICE '-> % internal transactions updated from suicide to selfdestruct', updated_row_count;

    DELETE FROM current_suicide_internal_transactions_temp
    WHERE row_number < next_iterator;

    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;

    ASSERT updated_row_count = deleted_row_count;

    -- COMMITS THE BATCH UPDATES
    CHECKPOINT;

    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;

  RAISE NOTICE 'All suicide type internal transactions updated to selfdestruct. Validating constraint.';

  ALTER TABLE internal_transactions VALIDATE CONSTRAINT selfdestruct_has_from_and_to_address;
END $$;
@@ -0,0 +1,80 @@
DO $$
DECLARE
  row_count integer := 1;
  batch_size integer := 50000; -- HOW MANY ITEMS WILL BE UPDATED AT A TIME
  iterator integer := 0;
  max_row_number integer;
  next_iterator integer;
  updated_transaction_count integer;
  deleted_internal_transaction_count integer;
  deleted_row_count integer;
BEGIN
  DROP TABLE IF EXISTS transactions_with_deprecated_internal_transactions;
  -- CREATES TEMP TABLE TO STORE DATA TO BE UPDATED
  CREATE TEMP TABLE transactions_with_deprecated_internal_transactions(
    hash bytea NOT NULL,
    row_number integer
  );
  INSERT INTO transactions_with_deprecated_internal_transactions
  SELECT DISTINCT ON (transaction_hash)
    transaction_hash,
    ROW_NUMBER () OVER ()
  FROM internal_transactions
  WHERE
    -- call_has_call_type CONSTRAINT
    (type = 'call' AND call_type IS NULL) OR
    -- call_has_input CONSTRAINT
    (type = 'call' AND input IS NULL) OR
    -- create_has_init CONSTRAINT
    (type = 'create' AND init IS NULL)
  ORDER BY transaction_hash DESC;

  max_row_number := (SELECT MAX(row_number) FROM transactions_with_deprecated_internal_transactions);
  RAISE NOTICE '% transactions to be updated', max_row_number;

  -- ITERATES THROUGH THE ITEMS UNTIL THE TEMP TABLE IS EMPTY
  WHILE iterator <= max_row_number LOOP
    next_iterator := iterator + batch_size;

    RAISE NOTICE '-> transactions with deprecated internal transactions % to % to be updated', iterator, next_iterator - 1;

    UPDATE transactions
    SET internal_transactions_indexed_at = NULL,
        error = NULL
    FROM transactions_with_deprecated_internal_transactions
    WHERE transactions.hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;

    GET DIAGNOSTICS updated_transaction_count = ROW_COUNT;

    RAISE NOTICE '-> % transactions updated to refetch internal transactions', updated_transaction_count;

    DELETE FROM internal_transactions
    USING transactions_with_deprecated_internal_transactions
    WHERE internal_transactions.transaction_hash = transactions_with_deprecated_internal_transactions.hash AND
          transactions_with_deprecated_internal_transactions.row_number < next_iterator;

    GET DIAGNOSTICS deleted_internal_transaction_count = ROW_COUNT;

    RAISE NOTICE '-> % internal transactions deleted', deleted_internal_transaction_count;

    DELETE FROM transactions_with_deprecated_internal_transactions
    WHERE row_number < next_iterator;

    GET DIAGNOSTICS deleted_row_count = ROW_COUNT;

    ASSERT updated_transaction_count = deleted_row_count;

    -- COMMITS THE BATCH UPDATES
    CHECKPOINT;

    -- UPDATES THE COUNTER SO IT DOESN'T TURN INTO AN INFINITE LOOP
    iterator := next_iterator;
  END LOOP;

  RAISE NOTICE 'All deprecated internal transactions will be refetched. Validating constraints.';

  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_call_type;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT call_has_input;
  ALTER TABLE internal_transactions VALIDATE CONSTRAINT create_has_init;
END $$;
@@ -0,0 +1,3 @@
#!/bin/sh

release_ctl eval --mfa "Explorer.ReleaseTasks.migrate/1" --argv -- "$@"
@@ -0,0 +1,3 @@
#!/bin/sh

release_ctl eval --mfa "Explorer.ReleaseTasks.seed/1" --argv -- "$@"
@@ -0,0 +1,85 @@
# Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
~w(rel plugins *.exs)
|> Path.join()
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))

defer = fn fun ->
  apply(fun, [])
end

app_root = fn ->
  if String.contains?(System.cwd!(), "apps") do
    Path.join([System.cwd!(), "/../../"])
  else
    System.cwd!()
  end
end

cookie =
  defer.(fn ->
    cookie_bytes =
      :crypto.strong_rand_bytes(32)
      |> Base.encode32()

    :ok = File.write!(Path.join(app_root.(), ".erlang_cookie"), cookie_bytes)
    :erlang.binary_to_atom(cookie_bytes, :utf8)
  end)

use Mix.Releases.Config,
  # This sets the default release built by `mix release`
  default_release: :default,
  # This sets the default environment used by `mix release`
  default_environment: Mix.env()

# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/config/distillery.html

# You may define one or more environments in this file;
# an environment's settings will override those of a release
# when building in that environment. This combination of release
# and environment configuration is called a profile.

environment :dev do
  # If you are running Phoenix, you should make sure that
  # server: true is set and the code reloader is disabled,
  # even in dev mode.
  # It is recommended that you build with MIX_ENV=prod and pass
  # the --env flag to Distillery explicitly if you want to use
  # dev mode.
  set dev_mode: true
  set include_erts: false
  set cookie: :"i6E,!mJ6|E&|.VPaDywo@N.o}BgmC$UdKXW[aK,(@U0Asfpp/NergA;CR%YW4;i6"
end

environment :prod do
  set include_erts: true
  set include_src: false
  set cookie: cookie
  set vm_args: "rel/vm.args"
end

# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default.

release :blockscout do
  set version: "1.2.0-beta"

  set applications: [
    :runtime_tools,
    block_scout_web: :permanent,
    ethereum_jsonrpc: :permanent,
    explorer: :permanent,
    indexer: :permanent
  ]

  set commands: [
    migrate: "rel/commands/migrate.sh",
    seed: "rel/commands/seed.sh"
  ]
end
@@ -0,0 +1,3 @@
*.*
!*.exs
!.gitignore
@@ -0,0 +1,30 @@
## This file provides the arguments given to the VM at startup
## You can find a full list of flags and their behaviours at
## http://erlang.org/doc/man/erl.html

## Name of the node
-name <%= release_name %>@127.0.0.1

## Cookie for distributed erlang
-setcookie <%= release.profile.cookie %>

## Heartbeat management; auto-restarts VM if it dies or becomes unresponsive
## (Disabled by default; use with caution!)
##-heart

## Enable kernel poll and a few async threads
##+K true
##+A 5
## For OTP21+, the +A flag is not used anymore;
## +SDio replaces it, using dirty schedulers
##+SDio 5

## Increase number of concurrent ports/sockets
##-env ERL_MAX_PORTS 4096

## Tweak GC to run more often
##-env ERL_FULLSWEEP_AFTER 10

# Enable SMP automatically based on availability
# On OTP21+, this is not needed anymore.
-smp auto