commit
d2c4415dd8
@@ -0,0 +1,124 @@
import { reducer, initialState } from '../../../js/pages/address/transactions'

describe('RECEIVED_NEW_TRANSACTION', () => {
  test('with new transaction', () => {
    const state = Object.assign({}, initialState, {
      items: ['transaction html']
    })
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: { transactionHtml: 'another transaction html' }
    }
    const output = reducer(state, action)

    expect(output.items).toEqual(['another transaction html', 'transaction html'])
  })

  test('when channel has been disconnected', () => {
    const state = Object.assign({}, initialState, {
      channelDisconnected: true,
      items: ['transaction html']
    })
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: { transactionHtml: 'another transaction html' }
    }
    const output = reducer(state, action)

    expect(output.items).toEqual(['transaction html'])
  })

  test('beyond page one', () => {
    const state = Object.assign({}, initialState, {
      beyondPageOne: true,
      items: ['transaction html']
    })
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: { transactionHtml: 'another transaction html' }
    }
    const output = reducer(state, action)

    expect(output.items).toEqual(['transaction html'])
  })

  test('adds the new transaction to state even when it is filtered by to', () => {
    const state = Object.assign({}, initialState, {
      addressHash: '0x001',
      filter: 'to',
      items: []
    })
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: {
        fromAddressHash: '0x002',
        transactionHtml: 'transaction html',
        toAddressHash: '0x001'
      }
    }
    const output = reducer(state, action)

    expect(output.items).toEqual(['transaction html'])
  })

  test(
    'does nothing when it is filtered by to but the toAddressHash is different from addressHash',
    () => {
      const state = Object.assign({}, initialState, {
        addressHash: '0x001',
        filter: 'to',
        items: []
      })
      const action = {
        type: 'RECEIVED_NEW_TRANSACTION',
        msg: {
          fromAddressHash: '0x003',
          transactionHtml: 'transaction html',
          toAddressHash: '0x002'
        }
      }
      const output = reducer(state, action)

      expect(output.items).toEqual([])
    })

  test('adds the new transaction to state even when it is filtered by from', () => {
    const state = Object.assign({}, initialState, {
      addressHash: '0x001',
      filter: 'from',
      items: []
    })
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: {
        fromAddressHash: '0x001',
        transactionHtml: 'transaction html',
        toAddressHash: '0x002'
      }
    }
    const output = reducer(state, action)

    expect(output.items).toEqual(['transaction html'])
  })

||||
  test(
    'does nothing when it is filtered by from but the fromAddressHash is different from addressHash',
    () => {
      const state = Object.assign({}, initialState, {
        addressHash: '0x001',
        filter: 'from',
        items: []
      })
      const action = {
        type: 'RECEIVED_NEW_TRANSACTION',
        msg: {
          fromAddressHash: '0x002',
          transactionHtml: 'transaction html',
          toAddressHash: '0x001'
        }
      }
      const output = reducer(state, action)

      expect(output.items).toEqual([])
    })
})
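The suite above covers only `RECEIVED_NEW_TRANSACTION`; the reducer's `CHANNEL_DISCONNECTED` case (added in the page module below) is left untested. A minimal sketch of how it could be covered with the same Jest conventions; the test names and assertions here are illustrative, not part of this commit:

describe('CHANNEL_DISCONNECTED', () => {
  test('sets channelDisconnected and keeps existing items', () => {
    const state = Object.assign({}, initialState, {
      items: ['transaction html']
    })
    const output = reducer(state, { type: 'CHANNEL_DISCONNECTED' })

    expect(output.channelDisconnected).toBe(true)
    expect(output.items).toEqual(['transaction html'])
  })

  test('does nothing beyond page one', () => {
    const state = Object.assign({}, initialState, { beyondPageOne: true })
    const output = reducer(state, { type: 'CHANNEL_DISCONNECTED' })

    expect(output.channelDisconnected).toBe(false)
  })
})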
@@ -0,0 +1,73 @@
import $ from 'jquery'
import _ from 'lodash'
import URI from 'urijs'
import humps from 'humps'
import socket from '../../socket'
import { connectElements } from '../../lib/redux_helpers.js'
import { createAsyncLoadStore } from '../../lib/async_listing_load'

export const initialState = {
  addressHash: null,
  channelDisconnected: false,
  filter: null
}

export function reducer (state, action) {
  switch (action.type) {
    case 'PAGE_LOAD':
    case 'ELEMENTS_LOAD': {
      return Object.assign({}, state, _.omit(action, 'type'))
    }
    case 'CHANNEL_DISCONNECTED': {
      if (state.beyondPageOne) return state

      return Object.assign({}, state, { channelDisconnected: true })
    }
    case 'RECEIVED_NEW_TRANSACTION': {
      if (state.channelDisconnected) return state

      if (state.beyondPageOne ||
        (state.filter === 'to' && action.msg.toAddressHash !== state.addressHash) ||
        (state.filter === 'from' && action.msg.fromAddressHash !== state.addressHash)) {
        return state
      }

      return Object.assign({}, state, { items: [action.msg.transactionHtml, ...state.items] })
    }
    default:
      return state
  }
}

const elements = {
  '[data-selector="channel-disconnected-message"]': {
    render ($el, state) {
      if (state.channelDisconnected) $el.show()
    }
  }
}

if ($('[data-page="address-transactions"]').length) {
  const store = createAsyncLoadStore(reducer, initialState, 'dataset.transactionHash')
  const addressHash = $('[data-page="address-details"]')[0].dataset.pageAddressHash
  const { filter, blockNumber } = humps.camelizeKeys(URI(window.location).query(true))

  connectElements({ store, elements })

  store.dispatch({
    type: 'PAGE_LOAD',
    addressHash,
    filter,
    beyondPageOne: !!blockNumber
  })

  const addressChannel = socket.channel(`addresses:${addressHash}`, {})
  addressChannel.join()
  addressChannel.onError(() => store.dispatch({ type: 'CHANNEL_DISCONNECTED' }))
  addressChannel.on('transaction', (msg) => {
    store.dispatch({
      type: 'RECEIVED_NEW_TRANSACTION',
      msg: humps.camelizeKeys(msg)
    })
  })
}
@@ -0,0 +1,117 @@
defmodule Explorer.Counters.AddressesWithBalanceCounter do
  @moduledoc """
  Caches the number of addresses with fetched coin balance > 0.

  The count is loaded asynchronously and refreshed every 30 minutes.
  """

  use GenServer

  alias Explorer.Chain

  @table :addresses_with_balance_counter

  @cache_key "addresses_with_balance"

  def table_name do
    @table
  end

  def cache_key do
    @cache_key
  end

  # It is undesirable to automatically start the consolidation in all environments.
  # Consider the test environment: if the consolidation starts but does not
  # finish before a test ends, that test fails. Hundreds of tests were failing
  # for this reason before the consolidation and the scheduler were disabled in
  # the test env.
  config = Application.get_env(:explorer, Explorer.Counters.AddressesWithBalanceCounter)
  @enable_consolidation Keyword.get(config, :enable_consolidation)

  @doc """
  Starts a process that periodically updates the counter of addresses with
  fetched coin balance > 0.
  """
  @spec start_link(term()) :: GenServer.on_start()
  def start_link(_) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

||||
  @impl true
  def init(args) do
    create_table()

    if enable_consolidation?() do
      Task.start_link(&consolidate/0)
      schedule_next_consolidation()
    end

    {:ok, args}
  end

  def create_table do
    opts = [
      :set,
      :named_table,
      :public,
      read_concurrency: true
    ]

    :ets.new(table_name(), opts)
  end

  defp schedule_next_consolidation do
    if enable_consolidation?() do
      Process.send_after(self(), :consolidate, :timer.minutes(30))
    end
  end

  @doc """
  Inserts new items into the `:ets` table.
  """
  def insert_counter({key, info}) do
    :ets.insert(table_name(), {key, info})
  end

  @impl true
  def handle_info(:consolidate, state) do
    consolidate()

    schedule_next_consolidation()

    {:noreply, state}
  end

||||
  @doc """
  Fetches the cached count from the `:ets` table.
  """
  def fetch do
    do_fetch(:ets.lookup(table_name(), cache_key()))
  end

||||
  defp do_fetch([{_, result}]), do: result
  defp do_fetch([]), do: 0

  @doc """
  Consolidates the info by populating the `:ets` table with the current database information.
  """
  def consolidate do
    counter = Chain.count_addresses_with_balance()

    insert_counter({cache_key(), counter})
  end

  @doc """
  Returns a boolean that indicates whether consolidation is enabled.

  In order to choose whether or not to enable the scheduler and the initial
  consolidation, change the following Explorer config:

  `config :explorer, Explorer.Counters.AddressesWithBalanceCounter, enable_consolidation: true`

  to:

  `config :explorer, Explorer.Counters.AddressesWithBalanceCounter, enable_consolidation: false`
  """
  def enable_consolidation?, do: @enable_consolidation
end
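The commit does not show where the counter is attached to a supervision tree. A minimal sketch of how it might be started by the application, assuming a conventional `Explorer.Application` module and supervisor name (both are assumptions, not part of this commit):

# Hypothetical supervision-tree excerpt; module and supervisor names are assumptions.
defmodule Explorer.Application do
  use Application

  @impl true
  def start(_type, _args) do
    children = [
      # `use GenServer` gives the counter a default child_spec/1, so listing
      # the module here starts it via start_link([]).
      Explorer.Counters.AddressesWithBalanceCounter
    ]

    Supervisor.start_link(children, strategy: :one_for_one, name: Explorer.Supervisor)
  end
end

In the test environment the scheduler and the initial consolidation can be turned off with the `enable_consolidation: false` config shown in the moduledoc above.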
@@ -0,0 +1,15 @@
defmodule Explorer.Counters.AddressesWithBalanceCounterTest do
  use Explorer.DataCase

  alias Explorer.Counters.AddressesWithBalanceCounter

  test "populates the cache with the number of addresses with fetched coin balance greater than 0" do
    insert(:address, fetched_coin_balance: 0)
    insert(:address, fetched_coin_balance: 1)
    insert(:address, fetched_coin_balance: 2)

    AddressesWithBalanceCounter.consolidate()

    assert AddressesWithBalanceCounter.fetch() == 2
  end
end