Merge branch 'master' into master
pull/2395/head
commit 390624816c — Victor Baranov (committed by GitHub)
27 changed files (changed lines in parentheses):
  1. CHANGELOG.md (3)
  2. apps/block_scout_web/assets/__tests__/pages/pending_transactions.js (1)
  3. apps/block_scout_web/assets/js/lib/async_listing_load.js (7)
  4. apps/block_scout_web/assets/js/lib/infinite_scroll_helpers.js (4)
  5. apps/block_scout_web/assets/js/lib/list_morph.js (29)
  6. apps/block_scout_web/assets/js/lib/redux_helpers.js (10)
  7. apps/block_scout_web/assets/js/lib/utils.js (4)
  8. apps/block_scout_web/assets/js/pages/address.js (4)
  9. apps/block_scout_web/assets/js/pages/address/coin_balances.js (4)
  10. apps/block_scout_web/assets/js/pages/address/internal_transactions.js (4)
  11. apps/block_scout_web/assets/js/pages/address/logs.js (4)
  12. apps/block_scout_web/assets/js/pages/address/transactions.js (4)
  13. apps/block_scout_web/assets/js/pages/address/validations.js (4)
  14. apps/block_scout_web/assets/js/pages/blocks.js (19)
  15. apps/block_scout_web/assets/js/pages/chain.js (18)
  16. apps/block_scout_web/assets/js/pages/pending_transactions.js (4)
  17. apps/block_scout_web/assets/js/pages/transaction.js (4)
  18. apps/block_scout_web/assets/js/pages/transactions.js (4)
  19. apps/block_scout_web/assets/js/pages/verification_form.js (4)
  20. apps/block_scout_web/assets/webpack.config.js (2)
  21. apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/eth_controller.ex (329)
  22. apps/block_scout_web/lib/block_scout_web/controllers/api_docs_controller.ex (4)
  23. apps/block_scout_web/test/block_scout_web/controllers/api/rpc/eth_controller_test.exs (51)
  24. apps/explorer/lib/explorer/chain/transaction.ex (3)
  25. apps/explorer/lib/explorer/eth_rpc.ex (371)
  26. apps/explorer/lib/explorer/etherscan/logs.ex (35)
  27. apps/explorer/test/explorer/etherscan/logs_test.exs (40)

@ -1,13 +1,16 @@
## Current
### Features
- [#2366](https://github.com/poanetwork/blockscout/pull/2366) - paginate eth logs
- [#2379](https://github.com/poanetwork/blockscout/pull/2379) - Disable network selector when it is empty
- [#2360](https://github.com/poanetwork/blockscout/pull/2360) - add default evm version to smart contract verification
- [#2352](https://github.com/poanetwork/blockscout/pull/2352) - Fetch rewards in parallel with transactions
- [#2294](https://github.com/poanetwork/blockscout/pull/2294) - add healthy block period checking endpoint
### Fixes
- [#2398](https://github.com/poanetwork/blockscout/pull/2398) - show only one decoded candidate
- [#2395](https://github.com/poanetwork/blockscout/pull/2395) - new block loading animation
- [#2389](https://github.com/poanetwork/blockscout/pull/2389) - Reduce Lodash lib size (86% of lib methods are not used)
- [#2378](https://github.com/poanetwork/blockscout/pull/2378) - Page performance: exclude moment.js localization files except EN, remove unused css
- [#2368](https://github.com/poanetwork/blockscout/pull/2368) - add two columns of smart contract info
- [#2375](https://github.com/poanetwork/blockscout/pull/2375) - Update created_contract_code_indexed_at on transaction import conflict

@ -1,4 +1,3 @@
import _ from 'lodash'
import { reducer, initialState } from '../../js/pages/pending_transactions'
test('CHANNEL_DISCONNECTED', () => {

@ -1,5 +1,6 @@
import $ from 'jquery'
import _ from 'lodash'
import map from 'lodash/map'
import merge from 'lodash/merge'
import URI from 'urijs'
import humps from 'humps'
import listMorph from '../lib/list_morph'
@ -164,7 +165,7 @@ export const elements = {
if (state.itemKey) {
const container = $el[0]
const newElements = _.map(state.items, (item) => $(item)[0])
const newElements = map(state.items, (item) => $(item)[0])
listMorph(container, newElements, { key: state.itemKey })
return
}
@ -244,7 +245,7 @@ export const elements = {
* adding or removing with the correct animation. Check list_morph.js for more information.
*/
export function createAsyncLoadStore (reducer, initialState, itemKey) {
const state = _.merge(asyncInitialState, initialState)
const state = merge(asyncInitialState, initialState)
const store = createStore(reduceReducers(asyncReducer, reducer, state))
if (typeof itemKey !== 'undefined') {

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import { connectElements } from './redux_helpers.js'
@ -12,7 +12,7 @@ const initialState = {
function infiniteScrollReducer (state = initialState, action) {
switch (action.type) {
case 'INFINITE_SCROLL_ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'LOADING_NEXT_PAGE': {
return Object.assign({}, state, {

@ -1,5 +1,10 @@
import $ from 'jquery'
import _ from 'lodash'
import map from 'lodash/map'
import get from 'lodash/get'
import noop from 'lodash/noop'
import find from 'lodash/find'
import intersectionBy from 'lodash/intersectionBy'
import differenceBy from 'lodash/differenceBy'
import morph from 'nanomorph'
import { updateAllAges } from './from_now'
@ -25,12 +30,12 @@ import { updateAllAges } from './from_now'
export default function (container, newElements, { key, horizontal } = {}) {
if (!container) return
const oldElements = $(container).children().get()
let currentList = _.map(oldElements, (el) => ({ id: _.get(el, key), el }))
const newList = _.map(newElements, (el) => ({ id: _.get(el, key), el }))
const overlap = _.intersectionBy(newList, currentList, 'id').map(({ id, el }) => ({ id, el: updateAllAges($(el))[0] }))
let currentList = map(oldElements, (el) => ({ id: get(el, key), el }))
const newList = map(newElements, (el) => ({ id: get(el, key), el }))
const overlap = intersectionBy(newList, currentList, 'id').map(({ id, el }) => ({ id, el: updateAllAges($(el))[0] }))
// remove old items
const removals = _.differenceBy(currentList, newList, 'id')
const removals = differenceBy(currentList, newList, 'id')
let canAnimate = !horizontal && removals.length <= 1
removals.forEach(({ el }) => {
if (!canAnimate) return el.remove()
@ -38,7 +43,7 @@ export default function (container, newElements, { key, horizontal } = {}) {
$el.addClass('shrink-out')
setTimeout(() => { slideUpRemove($el) }, 400)
})
currentList = _.differenceBy(currentList, removals, 'id')
currentList = differenceBy(currentList, removals, 'id')
// update kept items
currentList = currentList.map(({ el }, i) => ({
@ -47,14 +52,14 @@ export default function (container, newElements, { key, horizontal } = {}) {
}))
// add new items
const finalList = newList.map(({ id, el }) => _.get(_.find(currentList, { id }), 'el', el)).reverse()
const finalList = newList.map(({ id, el }) => get(find(currentList, { id }), 'el', el)).reverse()
canAnimate = !horizontal
finalList.forEach((el, i) => {
if (el.parentElement) return
if (!canAnimate) return container.insertBefore(el, _.get(finalList, `[${i - 1}]`))
if (!canAnimate) return container.insertBefore(el, get(finalList, `[${i - 1}]`))
canAnimate = false
if (!_.get(finalList, `[${i - 1}]`)) return slideDownAppend($(container), el)
slideDownBefore($(_.get(finalList, `[${i - 1}]`)), el)
if (!get(finalList, `[${i - 1}]`)) return slideDownAppend($(container), el)
slideDownBefore($(get(finalList, `[${i - 1}]`)), el)
})
}
@ -80,7 +85,7 @@ function slideUpRemove ($el) {
})
}
function smarterSlideDown ($el, { insert = _.noop } = {}) {
function smarterSlideDown ($el, { insert = noop } = {}) {
if (!$el.length) return
const originalScrollHeight = document.body.scrollHeight
const scrollPosition = window.scrollY
@ -100,7 +105,7 @@ function smarterSlideDown ($el, { insert = _.noop } = {}) {
}
}
function smarterSlideUp ($el, { complete = _.noop } = {}) {
function smarterSlideUp ($el, { complete = noop } = {}) {
if (!$el.length) return
const originalScrollHeight = document.body.scrollHeight
const scrollPosition = window.scrollY

@ -1,5 +1,7 @@
import $ from 'jquery'
import _ from 'lodash'
import reduce from 'lodash/reduce'
import isObject from 'lodash/isObject'
import forIn from 'lodash/forIn'
import { createStore as reduxCreateStore } from 'redux'
/**
@ -97,17 +99,17 @@ export function createStore (reducer) {
*/
export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' }) {
function loadElements () {
return _.reduce(elements, (pageLoadParams, { load }, selector) => {
return reduce(elements, (pageLoadParams, { load }, selector) => {
if (!load) return pageLoadParams
const $el = $(selector)
if (!$el.length) return pageLoadParams
const morePageLoadParams = load($el, store)
return _.isObject(morePageLoadParams) ? Object.assign(pageLoadParams, morePageLoadParams) : pageLoadParams
return isObject(morePageLoadParams) ? Object.assign(pageLoadParams, morePageLoadParams) : pageLoadParams
}, {})
}
function renderElements (state, oldState) {
_.forIn(elements, ({ render }, selector) => {
forIn(elements, ({ render }, selector) => {
if (!render) return
const $el = $(selector)
if (!$el.length) return

@ -1,8 +1,8 @@
import _ from 'lodash'
import debounce from 'lodash/debounce'
export function batchChannel (func) {
let msgs = []
const debouncedFunc = _.debounce(() => {
const debouncedFunc = debounce(() => {
func.apply(this, [msgs])
msgs = []
}, 1000, { maxWait: 5000 })

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import URI from 'urijs'
import humps from 'humps'
import numeral from 'numeral'
@ -25,7 +25,7 @@ export function reducer (state = initialState, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
if (state.beyondPageOne) return state

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import socket from '../../socket'
import { connectElements } from '../../lib/redux_helpers.js'
@ -14,7 +14,7 @@ export function reducer (state, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
if (state.beyondPageOne) return state

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import numeral from 'numeral'
import socket from '../../socket'
@ -20,7 +20,7 @@ export function reducer (state, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
if (state.beyondPageOne) return state

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import { connectElements } from '../../lib/redux_helpers.js'
import { createAsyncLoadStore } from '../../lib/async_listing_load'
@ -13,7 +13,7 @@ export function reducer (state, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'START_SEARCH': {
return Object.assign({}, state, {pagesStack: [], isSearch: true})

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import URI from 'urijs'
import humps from 'humps'
import { subscribeChannel } from '../../socket'
@ -16,7 +16,7 @@ export function reducer (state, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
if (state.beyondPageOne) return state

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import socket from '../../socket'
import { connectElements } from '../../lib/redux_helpers.js'
@ -14,7 +14,7 @@ export function reducer (state = initialState, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
return Object.assign({}, state, { channelDisconnected: true })

@ -1,5 +1,10 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import last from 'lodash/last'
import min from 'lodash/min'
import max from 'lodash/max'
import keys from 'lodash/keys'
import rangeRight from 'lodash/rangeRight'
import humps from 'humps'
import socket from '../socket'
import { connectElements } from '../lib/redux_helpers.js'
@ -14,7 +19,7 @@ export const blockReducer = withMissingBlocks(baseReducer)
function baseReducer (state = initialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
return Object.assign({}, state, {
@ -25,7 +30,7 @@ function baseReducer (state = initialState, action) {
if (state.channelDisconnected || state.beyondPageOne || state.blockType !== 'block') return state
const blockNumber = getBlockNumber(action.msg.blockHtml)
const minBlock = getBlockNumber(_.last(state.items))
const minBlock = getBlockNumber(last(state.items))
if (state.items.length && blockNumber < minBlock) return state
@ -62,12 +67,12 @@ function withMissingBlocks (reducer) {
return acc
}, {})
const blockNumbers = _(blockNumbersToItems).keys().map(x => parseInt(x, 10)).value()
const minBlock = _.min(blockNumbers)
const maxBlock = _.max(blockNumbers)
const blockNumbers = keys(blockNumbersToItems).map(x => parseInt(x, 10))
const minBlock = min(blockNumbers)
const maxBlock = max(blockNumbers)
return Object.assign({}, result, {
items: _.rangeRight(minBlock, maxBlock + 1)
items: rangeRight(minBlock, maxBlock + 1)
.map((blockNumber) => blockNumbersToItems[blockNumber] || placeHolderBlock(blockNumber))
})
}

@ -1,5 +1,9 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import first from 'lodash/first'
import rangeRight from 'lodash/rangeRight'
import find from 'lodash/find'
import map from 'lodash/map'
import humps from 'humps'
import numeral from 'numeral'
import socket from '../socket'
@ -33,7 +37,7 @@ export const reducer = withMissingBlocks(baseReducer)
function baseReducer (state = initialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'RECEIVED_NEW_ADDRESS_COUNT': {
return Object.assign({}, state, {
@ -122,12 +126,12 @@ function withMissingBlocks (reducer) {
if (!result.blocks || result.blocks.length < 2) return result
const maxBlock = _.first(result.blocks).blockNumber
const maxBlock = first(result.blocks).blockNumber
const minBlock = maxBlock - (result.blocks.length - 1)
return Object.assign({}, result, {
blocks: _.rangeRight(minBlock, maxBlock + 1)
.map((blockNumber) => _.find(result.blocks, ['blockNumber', blockNumber]) || {
blocks: rangeRight(minBlock, maxBlock + 1)
.map((blockNumber) => find(result.blocks, ['blockNumber', blockNumber]) || {
blockNumber,
chainBlockHtml: placeHolderBlock(blockNumber)
})
@ -194,7 +198,7 @@ const elements = {
const container = $el[0]
if (state.blocksLoading === false) {
const blocks = _.map(state.blocks, ({ chainBlockHtml }) => $(chainBlockHtml)[0])
const blocks = map(state.blocks, ({ chainBlockHtml }) => $(chainBlockHtml)[0])
listMorph(container, blocks, { key: 'dataset.blockNumber', horizontal: true })
}
}
@ -234,7 +238,7 @@ const elements = {
render ($el, state, oldState) {
if (oldState.transactions === state.transactions) return
const container = $el[0]
const newElements = _.map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
const newElements = map(state.transactions, ({ transactionHtml }) => $(transactionHtml)[0])
listMorph(container, newElements, { key: 'dataset.identifierHash' })
}
},

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import numeral from 'numeral'
import socket from '../socket'
@ -20,7 +20,7 @@ export const initialState = {
export function reducer (state = initialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
return Object.assign({}, state, {

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import numeral from 'numeral'
import socket from '../socket'
@ -13,7 +13,7 @@ export const initialState = {
export function reducer (state = initialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'RECEIVED_NEW_BLOCK': {
if ((action.msg.blockNumber - state.blockNumber) > state.confirmations) {

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import humps from 'humps'
import numeral from 'numeral'
import socket from '../socket'
@ -18,7 +18,7 @@ export const initialState = {
export function reducer (state = initialState, action) {
switch (action.type) {
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
return Object.assign({}, state, {

@ -1,5 +1,5 @@
import $ from 'jquery'
import _ from 'lodash'
import omit from 'lodash/omit'
import URI from 'urijs'
import humps from 'humps'
import { subscribeChannel } from '../socket'
@ -15,7 +15,7 @@ export function reducer (state = initialState, action) {
switch (action.type) {
case 'PAGE_LOAD':
case 'ELEMENTS_LOAD': {
return Object.assign({}, state, _.omit(action, 'type'))
return Object.assign({}, state, omit(action, 'type'))
}
case 'CHANNEL_DISCONNECTED': {
if (state.beyondPageOne) return state

@ -1,7 +1,7 @@
const path = require('path');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const { ContextReplacementPlugin } = require('webpack')
const { ContextReplacementPlugin } = require('webpack');
const glob = require("glob");
function transpileViewScript(file) {

@ -1,44 +1,10 @@
defmodule BlockScoutWeb.API.RPC.EthController do
use BlockScoutWeb, :controller
alias Ecto.Type, as: EctoType
alias Explorer.{Chain, Repo}
alias Explorer.Chain.{Block, Data, Hash, Hash.Address, Wei}
alias Explorer.Etherscan.Logs
@methods %{
"eth_getBalance" => %{
action: :eth_get_balance,
notes: """
the `earliest` parameter will not work as expected currently, because genesis block balances
are not currently imported
""",
example: """
{"id": 0, "jsonrpc": "2.0", "method": "eth_getBalance", "params": ["0x0000000000000000000000000000000000000007", "2"]}
"""
},
"eth_getLogs" => %{
action: :eth_get_logs,
notes: """
Will never return more than 1000 log entries.
""",
example: """
{"id": 0, "jsonrpc": "2.0", "method": "eth_getLogs", "params": [{"address": "0x0000000000000000000000000000000000000026","topics": ["0x01"]}]}
"""
}
}
@index_to_word %{
0 => "first",
1 => "second",
2 => "third",
3 => "fourth"
}
def methods, do: @methods
alias Explorer.EthRPC
def eth_request(%{body_params: %{"_json" => requests}} = conn, _) when is_list(requests) do
responses = responses(requests)
responses = EthRPC.responses(requests)
conn
|> put_status(200)
@ -46,7 +12,7 @@ defmodule BlockScoutWeb.API.RPC.EthController do
end
def eth_request(%{body_params: %{"_json" => request}} = conn, _) do
[response] = responses([request])
[response] = EthRPC.responses([request])
conn
|> put_status(200)
@ -65,297 +31,10 @@ defmodule BlockScoutWeb.API.RPC.EthController do
_ -> request
end
[response] = responses([decoded_request])
[response] = EthRPC.responses([decoded_request])
conn
|> put_status(200)
|> render("response.json", %{response: response})
end
def eth_get_balance(address_param, block_param \\ nil) do
with {:address, {:ok, address}} <- {:address, Chain.string_to_address_hash(address_param)},
{:block, {:ok, block}} <- {:block, block_param(block_param)},
{:balance, {:ok, balance}} <- {:balance, Chain.get_balance_as_of_block(address, block)} do
{:ok, Wei.hex_format(balance)}
else
{:address, :error} ->
{:error, "Query parameter 'address' is invalid"}
{:block, :error} ->
{:error, "Query parameter 'block' is invalid"}
{:balance, {:error, :not_found}} ->
{:error, "Balance not found"}
end
end
def eth_get_logs(filter_options) do
with {:ok, address_or_topic_params} <- address_or_topic_params(filter_options),
{:ok, from_block_param, to_block_param} <- logs_blocks_filter(filter_options),
{:ok, from_block} <- cast_block(from_block_param),
{:ok, to_block} <- cast_block(to_block_param) do
filter =
address_or_topic_params
|> Map.put(:from_block, from_block)
|> Map.put(:to_block, to_block)
|> Map.put(:allow_non_consensus, true)
{:ok, filter |> Logs.list_logs() |> Enum.map(&render_log/1)}
else
{:error, message} when is_bitstring(message) ->
{:error, message}
{:error, :empty} ->
{:ok, []}
_ ->
{:error, "Something went wrong."}
end
end
defp render_log(log) do
topics =
Enum.reject(
[log.first_topic, log.second_topic, log.third_topic, log.fourth_topic],
&is_nil/1
)
%{
"address" => to_string(log.address_hash),
"blockHash" => to_string(log.block_hash),
"blockNumber" => Integer.to_string(log.block_number, 16),
"data" => to_string(log.data),
"logIndex" => Integer.to_string(log.index, 16),
"removed" => log.block_consensus == false,
"topics" => topics,
"transactionHash" => to_string(log.transaction_hash),
"transactionIndex" => log.transaction_index,
"transactionLogIndex" => log.index,
"type" => "mined"
}
end
defp cast_block("0x" <> hexadecimal_digits = input) do
case Integer.parse(hexadecimal_digits, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, input <> " is not a valid block number"}
end
end
defp cast_block(integer) when is_integer(integer), do: {:ok, integer}
defp cast_block(_), do: {:error, "invalid block number"}
defp address_or_topic_params(filter_options) do
address_param = Map.get(filter_options, "address")
topics_param = Map.get(filter_options, "topics")
with {:ok, address} <- validate_address(address_param),
{:ok, topics} <- validate_topics(topics_param) do
address_and_topics(address, topics)
end
end
defp address_and_topics(nil, nil), do: {:error, "Must supply one of address and topics"}
defp address_and_topics(address, nil), do: {:ok, %{address_hash: address}}
defp address_and_topics(nil, topics), do: {:ok, topics}
defp address_and_topics(address, topics), do: {:ok, Map.put(topics, :address_hash, address)}
defp validate_address(nil), do: {:ok, nil}
defp validate_address(address) do
case Address.cast(address) do
{:ok, address} -> {:ok, address}
:error -> {:error, "invalid address"}
end
end
defp validate_topics(nil), do: {:ok, nil}
defp validate_topics([]), do: []
defp validate_topics(topics) when is_list(topics) do
topics
|> Stream.with_index()
|> Enum.reduce({:ok, %{}}, fn {topic, index}, {:ok, acc} ->
case cast_topics(topic) do
{:ok, data} ->
with_filter = Map.put(acc, String.to_existing_atom("#{@index_to_word[index]}_topic"), data)
{:ok, add_operator(with_filter, index)}
:error ->
{:error, "invalid topics"}
end
end)
end
defp add_operator(filters, 0), do: filters
defp add_operator(filters, index) do
Map.put(filters, String.to_existing_atom("topic#{index - 1}_#{index}_opr"), "and")
end
defp cast_topics(topics) when is_list(topics) do
case EctoType.cast({:array, Data}, topics) do
{:ok, data} -> {:ok, Enum.map(data, &to_string/1)}
:error -> :error
end
end
defp cast_topics(topic) do
case Data.cast(topic) do
{:ok, data} -> {:ok, to_string(data)}
:error -> :error
end
end
defp responses(requests) do
Enum.map(requests, fn request ->
with {:id, {:ok, id}} <- {:id, Map.fetch(request, "id")},
{:request, {:ok, result}} <- {:request, do_eth_request(request)} do
format_success(result, id)
else
{:id, :error} -> format_error("id is a required field", 0)
{:request, {:error, message}} -> format_error(message, Map.get(request, "id"))
end
end)
end
defp logs_blocks_filter(filter_options) do
with {:filter, %{"blockHash" => block_hash_param}} <- {:filter, filter_options},
{:block_hash, {:ok, block_hash}} <- {:block_hash, Hash.Full.cast(block_hash_param)},
{:block, %{number: number}} <- {:block, Repo.get(Block, block_hash)} do
{:ok, number, number}
else
{:filter, filters} ->
from_block = Map.get(filters, "fromBlock", "latest")
to_block = Map.get(filters, "toBlock", "latest")
max_block_number =
if from_block == "latest" || to_block == "latest" do
max_consensus_block_number()
end
pending_block_number =
if from_block == "pending" || to_block == "pending" do
max_non_consensus_block_number(max_block_number)
end
if is_nil(pending_block_number) && from_block == "pending" && to_block == "pending" do
{:error, :empty}
else
to_block_numbers(from_block, to_block, max_block_number, pending_block_number)
end
{:block, _} ->
{:error, "Invalid Block Hash"}
{:block_hash, _} ->
{:error, "Invalid Block Hash"}
end
end
defp to_block_numbers(from_block, to_block, max_block_number, pending_block_number) do
actual_pending_block_number = pending_block_number || max_block_number
with {:ok, from} <-
to_block_number(from_block, max_block_number, actual_pending_block_number),
{:ok, to} <- to_block_number(to_block, max_block_number, actual_pending_block_number) do
{:ok, from, to}
end
end
defp to_block_number(integer, _, _) when is_integer(integer), do: {:ok, integer}
defp to_block_number("latest", max_block_number, _), do: {:ok, max_block_number || 0}
defp to_block_number("earliest", _, _), do: {:ok, 0}
defp to_block_number("pending", max_block_number, nil), do: {:ok, max_block_number || 0}
defp to_block_number("pending", _, pending), do: {:ok, pending}
defp to_block_number("0x" <> number, _, _) do
case Integer.parse(number, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, "invalid block number"}
end
end
defp to_block_number(number, _, _) when is_bitstring(number) do
case Integer.parse(number, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, "invalid block number"}
end
end
defp to_block_number(_, _, _), do: {:error, "invalid block number"}
defp max_non_consensus_block_number(max) do
case Chain.max_non_consensus_block_number(max) do
{:ok, number} -> number
_ -> nil
end
end
defp max_consensus_block_number do
case Chain.max_consensus_block_number() do
{:ok, number} -> number
_ -> nil
end
end
defp format_success(result, id) do
%{result: result, id: id}
end
defp format_error(message, id) do
%{error: message, id: id}
end
defp do_eth_request(%{"jsonrpc" => rpc_version}) when rpc_version != "2.0" do
{:error, "invalid rpc version"}
end
defp do_eth_request(%{"jsonrpc" => "2.0", "method" => method, "params" => params})
when is_list(params) do
with {:ok, action} <- get_action(method),
{:correct_arity, true} <-
{:correct_arity, :erlang.function_exported(__MODULE__, action, Enum.count(params))} do
apply(__MODULE__, action, params)
else
{:correct_arity, _} ->
{:error, "Incorrect number of params."}
_ ->
{:error, "Action not found."}
end
end
defp do_eth_request(%{"params" => _params, "method" => _}) do
{:error, "Invalid params. Params must be a list."}
end
defp do_eth_request(_) do
{:error, "Method, params, and jsonrpc, are all required parameters."}
end
defp get_action(action) do
case Map.get(@methods, action) do
%{action: action} ->
{:ok, action}
_ ->
:error
end
end
defp block_param("latest"), do: {:ok, :latest}
defp block_param("earliest"), do: {:ok, :earliest}
defp block_param("pending"), do: {:ok, :pending}
defp block_param(string_integer) when is_bitstring(string_integer) do
case Integer.parse(string_integer) do
{integer, ""} -> {:ok, integer}
_ -> :error
end
end
defp block_param(nil), do: {:ok, :latest}
defp block_param(_), do: :error
end

@ -1,8 +1,8 @@
defmodule BlockScoutWeb.APIDocsController do
use BlockScoutWeb, :controller
alias BlockScoutWeb.API.RPC.EthController
alias BlockScoutWeb.Etherscan
alias Explorer.EthRPC
def index(conn, _params) do
conn
@ -12,7 +12,7 @@ defmodule BlockScoutWeb.APIDocsController do
def eth_rpc(conn, _params) do
conn
|> assign(:documentation, EthController.methods())
|> assign(:documentation, EthRPC.methods())
|> render("eth_rpc.html")
end
end
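The JSON-RPC logic now lives in Explorer.EthRPC (see the eth_rpc.ex hunk below), so both the RPC controller and the API docs page simply delegate to it. A minimal sketch of exercising that module directly; the request shapes are taken from the @methods examples, and the returned values are illustrative rather than actual chain data:

requests = [
  %{
    "jsonrpc" => "2.0",
    "id" => 0,
    "method" => "eth_getBalance",
    "params" => ["0x0000000000000000000000000000000000000007", "2"]
  },
  # "id" deliberately omitted to show the error path
  %{
    "jsonrpc" => "2.0",
    "method" => "eth_getLogs",
    "params" => [%{"address" => "0x0000000000000000000000000000000000000026", "topics" => ["0x01"]}]
  }
]

# responses/1 returns one map per request, each tagged with its "id"
Explorer.EthRPC.responses(requests)
# => [
#      %{id: 0, result: "0x0"},                   # balance value is illustrative
#      %{id: 0, error: "id is a required field"}  # a missing "id" is reported with id 0
#    ]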

@ -125,6 +125,57 @@ defmodule BlockScoutWeb.API.RPC.EthControllerTest do
assert [%{"data" => "0x010101"}, %{"data" => "0x020202"}] = Enum.sort_by(response["result"], &Map.get(&1, "data"))
end
test "paginates logs", %{conn: conn, api_params: api_params} do
contract_address = insert(:contract_address)
transaction =
:transaction
|> insert(to_address: contract_address)
|> with_block()
inserted_records =
insert_list(2000, :log, address: contract_address, transaction: transaction, first_topic: "0x01")
params = params(api_params, [%{"address" => to_string(contract_address), "topics" => [["0x01"]]}])
assert response =
conn
|> post("/api/eth_rpc", params)
|> json_response(200)
assert Enum.count(response["result"]) == 1000
{last_log_index, ""} = Integer.parse(List.last(response["result"])["logIndex"], 16)
next_page_params = %{
"blockNumber" => Integer.to_string(transaction.block_number, 16),
"transactionIndex" => transaction.index,
"logIndex" => Integer.to_string(last_log_index, 16)
}
new_params =
params(api_params, [
%{"paging_options" => next_page_params, "address" => to_string(contract_address), "topics" => [["0x01"]]}
])
assert new_response =
conn
|> post("/api/eth_rpc", new_params)
|> json_response(200)
assert Enum.count(new_response["result"]) == 1000
all_found_logs = response["result"] ++ new_response["result"]
assert Enum.all?(inserted_records, fn record ->
Enum.any?(all_found_logs, fn found_log ->
{index, ""} = Integer.parse(found_log["logIndex"], 16)
record.index == index
end)
end)
end
test "with a matching address and multiple topic matches in different positions", %{
conn: conn,
api_params: api_params

@ -416,7 +416,8 @@ defmodule Explorer.Chain.Transaction do
candidates_query =
from(
contract_method in ContractMethod,
where: contract_method.identifier == ^method_id
where: contract_method.identifier == ^method_id,
limit: 1
)
candidates =

@ -0,0 +1,371 @@
defmodule Explorer.EthRPC do
@moduledoc """
Ethereum JSON RPC methods logic implementation.
"""
alias Ecto.Type, as: EctoType
alias Explorer.{Chain, Repo}
alias Explorer.Chain.{Block, Data, Hash, Hash.Address, Wei}
alias Explorer.Etherscan.Logs
@methods %{
"eth_getBalance" => %{
action: :eth_get_balance,
notes: """
the `earliest` parameter will not work as expected currently, because genesis block balances
are not currently imported
""",
example: """
{"id": 0, "jsonrpc": "2.0", "method": "eth_getBalance", "params": ["0x0000000000000000000000000000000000000007", "2"]}
"""
},
"eth_getLogs" => %{
action: :eth_get_logs,
notes: """
Will never return more than 1000 log entries.\n
For this reason, pagination options can be used to request the next page. The pagination params ({"logIndex": "3D", "blockNumber": "6423AC", "transactionIndex": 53}) are taken from the last log of the previous response; all three parameters are required for pagination.
""",
example: """
{"id": 0, "jsonrpc": "2.0", "method": "eth_getLogs",
"params": [
{"address": "0xc78Be425090Dbd437532594D12267C5934Cc6c6f",
"paging_options": {"logIndex": "3D", "blockNumber": "6423AC", "transactionIndex": 53},
"fromBlock": "earliest",
"toBlock": "latest",
"topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"]}]}
"""
}
}
@index_to_word %{
0 => "first",
1 => "second",
2 => "third",
3 => "fourth"
}
def responses(requests) do
Enum.map(requests, fn request ->
with {:id, {:ok, id}} <- {:id, Map.fetch(request, "id")},
{:request, {:ok, result}} <- {:request, do_eth_request(request)} do
format_success(result, id)
else
{:id, :error} -> format_error("id is a required field", 0)
{:request, {:error, message}} -> format_error(message, Map.get(request, "id"))
end
end)
end
def eth_get_balance(address_param, block_param \\ nil) do
with {:address, {:ok, address}} <- {:address, Chain.string_to_address_hash(address_param)},
{:block, {:ok, block}} <- {:block, block_param(block_param)},
{:balance, {:ok, balance}} <- {:balance, Chain.get_balance_as_of_block(address, block)} do
{:ok, Wei.hex_format(balance)}
else
{:address, :error} ->
{:error, "Query parameter 'address' is invalid"}
{:block, :error} ->
{:error, "Query parameter 'block' is invalid"}
{:balance, {:error, :not_found}} ->
{:error, "Balance not found"}
end
end
def eth_get_logs(filter_options) do
with {:ok, address_or_topic_params} <- address_or_topic_params(filter_options),
{:ok, from_block_param, to_block_param} <- logs_blocks_filter(filter_options),
{:ok, from_block} <- cast_block(from_block_param),
{:ok, to_block} <- cast_block(to_block_param),
{:ok, paging_options} <- paging_options(filter_options) do
filter =
address_or_topic_params
|> Map.put(:from_block, from_block)
|> Map.put(:to_block, to_block)
|> Map.put(:allow_non_consensus, true)
logs =
filter
|> Logs.list_logs(paging_options)
|> Enum.map(&render_log/1)
{:ok, logs}
else
{:error, message} when is_bitstring(message) ->
{:error, message}
{:error, :empty} ->
{:ok, []}
_ ->
{:error, "Something went wrong."}
end
end
defp render_log(log) do
topics =
Enum.reject(
[log.first_topic, log.second_topic, log.third_topic, log.fourth_topic],
&is_nil/1
)
%{
"address" => to_string(log.address_hash),
"blockHash" => to_string(log.block_hash),
"blockNumber" => Integer.to_string(log.block_number, 16),
"data" => to_string(log.data),
"logIndex" => Integer.to_string(log.index, 16),
"removed" => log.block_consensus == false,
"topics" => topics,
"transactionHash" => to_string(log.transaction_hash),
"transactionIndex" => log.transaction_index,
"transactionLogIndex" => log.index,
"type" => "mined"
}
end
defp cast_block("0x" <> hexadecimal_digits = input) do
case Integer.parse(hexadecimal_digits, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, input <> " is not a valid block number"}
end
end
defp cast_block(integer) when is_integer(integer), do: {:ok, integer}
defp cast_block(_), do: {:error, "invalid block number"}
defp address_or_topic_params(filter_options) do
address_param = Map.get(filter_options, "address")
topics_param = Map.get(filter_options, "topics")
with {:ok, address} <- validate_address(address_param),
{:ok, topics} <- validate_topics(topics_param) do
address_and_topics(address, topics)
end
end
defp address_and_topics(nil, nil), do: {:error, "Must supply one of address and topics"}
defp address_and_topics(address, nil), do: {:ok, %{address_hash: address}}
defp address_and_topics(nil, topics), do: {:ok, topics}
defp address_and_topics(address, topics), do: {:ok, Map.put(topics, :address_hash, address)}
defp validate_address(nil), do: {:ok, nil}
defp validate_address(address) do
case Address.cast(address) do
{:ok, address} -> {:ok, address}
:error -> {:error, "invalid address"}
end
end
defp validate_topics(nil), do: {:ok, nil}
defp validate_topics([]), do: []
defp validate_topics(topics) when is_list(topics) do
topics
|> Stream.with_index()
|> Enum.reduce({:ok, %{}}, fn {topic, index}, {:ok, acc} ->
case cast_topics(topic) do
{:ok, data} ->
with_filter = Map.put(acc, String.to_existing_atom("#{@index_to_word[index]}_topic"), data)
{:ok, add_operator(with_filter, index)}
:error ->
{:error, "invalid topics"}
end
end)
end
defp add_operator(filters, 0), do: filters
defp add_operator(filters, index) do
Map.put(filters, String.to_existing_atom("topic#{index - 1}_#{index}_opr"), "and")
end
defp cast_topics(topics) when is_list(topics) do
case EctoType.cast({:array, Data}, topics) do
{:ok, data} -> {:ok, Enum.map(data, &to_string/1)}
:error -> :error
end
end
defp cast_topics(topic) do
case Data.cast(topic) do
{:ok, data} -> {:ok, to_string(data)}
:error -> :error
end
end
defp logs_blocks_filter(filter_options) do
with {:filter, %{"blockHash" => block_hash_param}} <- {:filter, filter_options},
{:block_hash, {:ok, block_hash}} <- {:block_hash, Hash.Full.cast(block_hash_param)},
{:block, %{number: number}} <- {:block, Repo.get(Block, block_hash)} do
{:ok, number, number}
else
{:filter, filters} ->
from_block = Map.get(filters, "fromBlock", "latest")
to_block = Map.get(filters, "toBlock", "latest")
max_block_number =
if from_block == "latest" || to_block == "latest" do
max_consensus_block_number()
end
pending_block_number =
if from_block == "pending" || to_block == "pending" do
max_non_consensus_block_number(max_block_number)
end
if is_nil(pending_block_number) && from_block == "pending" && to_block == "pending" do
{:error, :empty}
else
to_block_numbers(from_block, to_block, max_block_number, pending_block_number)
end
{:block, _} ->
{:error, "Invalid Block Hash"}
{:block_hash, _} ->
{:error, "Invalid Block Hash"}
end
end
defp paging_options(%{
"paging_options" => %{
"logIndex" => log_index,
"transactionIndex" => transaction_index,
"blockNumber" => block_number
}
})
when is_integer(transaction_index) do
with {:ok, parsed_block_number} <- to_number(block_number, "invalid block number"),
{:ok, parsed_log_index} <- to_number(log_index, "invalid log index") do
{:ok,
%{
log_index: parsed_log_index,
transaction_index: transaction_index,
block_number: parsed_block_number
}}
end
end
defp paging_options(_), do: {:ok, nil}
defp to_block_numbers(from_block, to_block, max_block_number, pending_block_number) do
actual_pending_block_number = pending_block_number || max_block_number
with {:ok, from} <-
to_block_number(from_block, max_block_number, actual_pending_block_number),
{:ok, to} <- to_block_number(to_block, max_block_number, actual_pending_block_number) do
{:ok, from, to}
end
end
defp to_block_number(integer, _, _) when is_integer(integer), do: {:ok, integer}
defp to_block_number("latest", max_block_number, _), do: {:ok, max_block_number || 0}
defp to_block_number("earliest", _, _), do: {:ok, 0}
defp to_block_number("pending", max_block_number, nil), do: {:ok, max_block_number || 0}
defp to_block_number("pending", _, pending), do: {:ok, pending}
defp to_block_number("0x" <> number, _, _) do
case Integer.parse(number, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, "invalid block number"}
end
end
defp to_block_number(number, _, _) when is_bitstring(number) do
case Integer.parse(number, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, "invalid block number"}
end
end
defp to_block_number(_, _, _), do: {:error, "invalid block number"}
defp to_number(number, error_message) when is_bitstring(number) do
case Integer.parse(number, 16) do
{integer, ""} -> {:ok, integer}
_ -> {:error, error_message}
end
end
defp to_number(_, error_message), do: {:error, error_message}
defp max_non_consensus_block_number(max) do
case Chain.max_non_consensus_block_number(max) do
{:ok, number} -> number
_ -> nil
end
end
defp max_consensus_block_number do
case Chain.max_consensus_block_number() do
{:ok, number} -> number
_ -> nil
end
end
defp format_success(result, id) do
%{result: result, id: id}
end
defp format_error(message, id) do
%{error: message, id: id}
end
defp do_eth_request(%{"jsonrpc" => rpc_version}) when rpc_version != "2.0" do
{:error, "invalid rpc version"}
end
defp do_eth_request(%{"jsonrpc" => "2.0", "method" => method, "params" => params})
when is_list(params) do
with {:ok, action} <- get_action(method),
{:correct_arity, true} <-
{:correct_arity, :erlang.function_exported(__MODULE__, action, Enum.count(params))} do
apply(__MODULE__, action, params)
else
{:correct_arity, _} ->
{:error, "Incorrect number of params."}
_ ->
{:error, "Action not found."}
end
end
defp do_eth_request(%{"params" => _params, "method" => _}) do
{:error, "Invalid params. Params must be a list."}
end
defp do_eth_request(_) do
{:error, "Method, params, and jsonrpc, are all required parameters."}
end
defp get_action(action) do
case Map.get(@methods, action) do
%{action: action} ->
{:ok, action}
_ ->
:error
end
end
defp block_param("latest"), do: {:ok, :latest}
defp block_param("earliest"), do: {:ok, :earliest}
defp block_param("pending"), do: {:ok, :pending}
defp block_param(string_integer) when is_bitstring(string_integer) do
case Integer.parse(string_integer) do
{integer, ""} -> {:ok, integer}
_ -> :error
end
end
defp block_param(nil), do: {:ok, :latest}
defp block_param(_), do: :error
def methods, do: @methods
end
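Because eth_getLogs never returns more than 1000 entries, a caller pages by feeding the last log of the previous response back in as "paging_options". A minimal sketch of such a loop; LogPager is a hypothetical helper, not part of this change. Note that render_log/1 returns "logIndex" and "blockNumber" as hex strings while "transactionIndex" stays an integer, which is exactly the shape paging_options/1 expects:

# Hypothetical helper: walk every page of eth_getLogs results.
defmodule LogPager do
  alias Explorer.EthRPC

  def fetch_all(address, topics, paging \\ nil, acc \\ []) do
    filter = maybe_put_paging(%{"address" => address, "topics" => topics}, paging)

    case EthRPC.eth_get_logs(filter) do
      # Fewer than 1000 entries means this was the last page
      {:ok, logs} when length(logs) < 1000 ->
        acc ++ logs

      {:ok, logs} ->
        last = List.last(logs)

        next_paging = %{
          "logIndex" => last["logIndex"],                 # hex string, parsed by to_number/2
          "blockNumber" => last["blockNumber"],           # hex string, parsed by to_number/2
          "transactionIndex" => last["transactionIndex"]  # integer, required by the guard
        }

        fetch_all(address, topics, next_paging, acc ++ logs)

      {:error, _message} = error ->
        error
    end
  end

  defp maybe_put_paging(filter, nil), do: filter
  defp maybe_put_paging(filter, paging), do: Map.put(filter, "paging_options", paging)
end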

@ -5,7 +5,7 @@ defmodule Explorer.Etherscan.Logs do
"""
import Ecto.Query, only: [from: 2, where: 3, subquery: 1]
import Ecto.Query, only: [from: 2, where: 3, subquery: 1, order_by: 3]
alias Explorer.Chain.{Block, InternalTransaction, Log, Transaction}
alias Explorer.Repo
@ -38,6 +38,8 @@ defmodule Explorer.Etherscan.Logs do
:type
]
@default_paging_options %{block_number: nil, transaction_index: nil, log_index: nil}
@doc """
Gets a list of logs that meet the criteria in a given filter map.
@ -68,7 +70,10 @@ defmodule Explorer.Etherscan.Logs do
"""
@spec list_logs(map()) :: [map()]
def list_logs(%{address_hash: address_hash} = filter) when not is_nil(address_hash) do
def list_logs(filter, paging_options \\ @default_paging_options)
def list_logs(%{address_hash: address_hash} = filter, paging_options) when not is_nil(address_hash) do
paging_options = if is_nil(paging_options), do: @default_paging_options, else: paging_options
prepared_filter = Map.merge(@base_filter, filter)
logs_query = where_topic_match(Log, prepared_filter)
@ -134,14 +139,18 @@ defmodule Explorer.Etherscan.Logs do
)
end
Repo.all(query_with_consensus)
query_with_consensus
|> order_by([log], asc: log.index)
|> page_logs(paging_options)
|> Repo.all()
end
# Since address_hash was not present, we know that a
# topic filter has been applied, so we use a different
# query that is optimized for a logs filter over an
# address_hash
def list_logs(filter) do
def list_logs(filter, paging_options) do
paging_options = if is_nil(paging_options), do: @default_paging_options, else: paging_options
prepared_filter = Map.merge(@base_filter, filter)
logs_query = where_topic_match(Log, prepared_filter)
@ -182,7 +191,10 @@ defmodule Explorer.Etherscan.Logs do
select_merge: map(log, ^@log_fields)
)
Repo.all(query_with_block_transaction_data)
query_with_block_transaction_data
|> order_by([log], asc: log.index)
|> page_logs(paging_options)
|> Repo.all()
end
@topics [
@ -231,4 +243,17 @@ defmodule Explorer.Etherscan.Logs do
end
defp where_multiple_topics_match(query, _, _, _), do: query
defp page_logs(query, %{block_number: nil, transaction_index: nil, log_index: nil}) do
query
end
defp page_logs(query, %{block_number: block_number, transaction_index: transaction_index, log_index: log_index}) do
from(
data in query,
where:
data.index > ^log_index and data.block_number >= ^block_number and
data.transaction_index >= ^transaction_index
)
end
end

@ -158,6 +158,46 @@ defmodule Explorer.Etherscan.LogsTest do
assert found_log.transaction_hash == transaction_block1.hash
end
test "paginates logs" do
contract_address = insert(:contract_address)
transaction =
%Transaction{block: block} =
:transaction
|> insert(to_address: contract_address)
|> with_block()
inserted_records = insert_list(2000, :log, address: contract_address, transaction: transaction)
filter = %{
from_block: block.number,
to_block: block.number,
address_hash: contract_address.hash
}
first_found_logs = Logs.list_logs(filter)
assert Enum.count(first_found_logs) == 1_000
last_record = List.last(first_found_logs)
next_page_params = %{
log_index: last_record.index,
transaction_index: last_record.transaction_index,
block_number: transaction.block_number
}
second_found_logs = Logs.list_logs(filter, next_page_params)
assert Enum.count(second_found_logs) == 1_000
all_found_logs = first_found_logs ++ second_found_logs
assert Enum.all?(inserted_records, fn record ->
Enum.any?(all_found_logs, fn found_log -> found_log.index == record.index end)
end)
end
test "with a valid topic{x}" do
contract_address = insert(:contract_address)
