commit
f8061ba3f2
@ -1,300 +1,327 @@ |
||||
import { reducer, initialState } from '../../js/pages/address' |
||||
|
||||
describe('PAGE_LOAD', () => { |
||||
test('page 1 without filter', () => { |
||||
const state = initialState |
||||
describe('RECEIVED_NEW_BLOCK', () => { |
||||
test('with new block', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
validationCount: 30, |
||||
validatedBlocks: [{ blockNumber: 1, blockHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'PAGE_LOAD', |
||||
addressHash: '1234', |
||||
beyondPageOne: false, |
||||
pendingTransactionHashes: ['1'] |
||||
type: 'RECEIVED_NEW_BLOCK', |
||||
msg: { blockNumber: 2, blockHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.addressHash).toBe('1234') |
||||
expect(output.beyondPageOne).toBe(false) |
||||
expect(output.filter).toBe(undefined) |
||||
expect(output.pendingTransactionHashes).toEqual(['1']) |
||||
expect(output.validationCount).toEqual(31) |
||||
expect(output.validatedBlocks).toEqual([ |
||||
{ blockNumber: 2, blockHtml: 'test 2' }, |
||||
{ blockNumber: 1, blockHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('page 2 without filter', () => { |
||||
const state = initialState |
||||
test('when channel has been disconnected', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
channelDisconnected: true, |
||||
validationCount: 30, |
||||
validatedBlocks: [{ blockNumber: 1, blockHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'PAGE_LOAD', |
||||
addressHash: '1234', |
||||
beyondPageOne: true, |
||||
pendingTransactionHashes: ['1'] |
||||
type: 'RECEIVED_NEW_BLOCK', |
||||
msg: { blockNumber: 2, blockHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.addressHash).toBe('1234') |
||||
expect(output.beyondPageOne).toBe(true) |
||||
expect(output.filter).toBe(undefined) |
||||
expect(output.pendingTransactionHashes).toEqual(['1']) |
||||
expect(output.validationCount).toEqual(30) |
||||
expect(output.validatedBlocks).toEqual([ |
||||
{ blockNumber: 1, blockHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('page 1 with "to" filter', () => { |
||||
const state = initialState |
||||
test('beyond page one', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
beyondPageOne: true, |
||||
validationCount: 30, |
||||
validatedBlocks: [{ blockNumber: 1, blockHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'PAGE_LOAD', |
||||
addressHash: '1234', |
||||
beyondPageOne: false, |
||||
filter: 'to' |
||||
type: 'RECEIVED_NEW_BLOCK', |
||||
msg: { blockNumber: 2, blockHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.addressHash).toBe('1234') |
||||
expect(output.beyondPageOne).toBe(false) |
||||
expect(output.filter).toBe('to') |
||||
expect(output.validationCount).toEqual(31) |
||||
expect(output.validatedBlocks).toEqual([ |
||||
{ blockNumber: 1, blockHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('page 2 with "to" filter', () => { |
||||
const state = initialState |
||||
}) |
||||
|
||||
describe('RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', () => { |
||||
test('with new internal transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
internalTransactions: [{ internalTransactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'PAGE_LOAD', |
||||
addressHash: '1234', |
||||
beyondPageOne: true, |
||||
filter: 'to' |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [{ internalTransactionHtml: 'test 2' }] |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.addressHash).toBe('1234') |
||||
expect(output.beyondPageOne).toBe(true) |
||||
expect(output.filter).toBe('to') |
||||
expect(output.internalTransactions).toEqual([ |
||||
{ internalTransactionHtml: 'test 2' }, |
||||
{ internalTransactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
}) |
||||
|
||||
test('CHANNEL_DISCONNECTED', () => { |
||||
const state = initialState |
||||
const action = { |
||||
type: 'CHANNEL_DISCONNECTED' |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.channelDisconnected).toBe(true) |
||||
}) |
||||
|
||||
test('RECEIVED_UPDATED_BALANCE', () => { |
||||
const state = initialState |
||||
const action = { |
||||
type: 'RECEIVED_UPDATED_BALANCE', |
||||
msg: { |
||||
balance: 'hello world' |
||||
test('with batch of new internal transactions', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
internalTransactions: [{ internalTransactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [ |
||||
{ internalTransactionHtml: 'test 2' }, |
||||
{ internalTransactionHtml: 'test 3' }, |
||||
{ internalTransactionHtml: 'test 4' }, |
||||
{ internalTransactionHtml: 'test 5' }, |
||||
{ internalTransactionHtml: 'test 6' }, |
||||
{ internalTransactionHtml: 'test 7' }, |
||||
{ internalTransactionHtml: 'test 8' }, |
||||
{ internalTransactionHtml: 'test 9' }, |
||||
{ internalTransactionHtml: 'test 10' }, |
||||
{ internalTransactionHtml: 'test 11' }, |
||||
{ internalTransactionHtml: 'test 12' }, |
||||
{ internalTransactionHtml: 'test 13' } |
||||
] |
||||
} |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.balance).toBe('hello world') |
||||
}) |
||||
const output = reducer(state, action) |
||||
|
||||
describe('RECEIVED_NEW_PENDING_TRANSACTION', () => { |
||||
test('single transaction', () => { |
||||
const state = initialState |
||||
expect(output.internalTransactions).toEqual([ |
||||
{ internalTransactionHtml: 'test 1' } |
||||
]) |
||||
expect(output.internalTransactionsBatch).toEqual([ |
||||
{ internalTransactionHtml: 'test 13' }, |
||||
{ internalTransactionHtml: 'test 12' }, |
||||
{ internalTransactionHtml: 'test 11' }, |
||||
{ internalTransactionHtml: 'test 10' }, |
||||
{ internalTransactionHtml: 'test 9' }, |
||||
{ internalTransactionHtml: 'test 8' }, |
||||
{ internalTransactionHtml: 'test 7' }, |
||||
{ internalTransactionHtml: 'test 6' }, |
||||
{ internalTransactionHtml: 'test 5' }, |
||||
{ internalTransactionHtml: 'test 4' }, |
||||
{ internalTransactionHtml: 'test 3' }, |
||||
{ internalTransactionHtml: 'test 2' }, |
||||
]) |
||||
}) |
||||
test('after batch of new internal transactions', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
internalTransactionsBatch: [{ internalTransactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { |
||||
transactionHash: '0x00', |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [ |
||||
{ internalTransactionHtml: 'test 2' } |
||||
] |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newPendingTransactions).toEqual(['test']) |
||||
expect(output.transactionCount).toEqual(null) |
||||
expect(output.internalTransactionsBatch).toEqual([ |
||||
{ internalTransactionHtml: 'test 2' }, |
||||
{ internalTransactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('single transaction after single transaction', () => { |
||||
test('when channel has been disconnected', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
newPendingTransactions: ['test 1'] |
||||
channelDisconnected: true, |
||||
internalTransactions: [{ internalTransactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { |
||||
transactionHash: '0x02', |
||||
transactionHtml: 'test 2' |
||||
} |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [{ internalTransactionHtml: 'test 2' }] |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newPendingTransactions).toEqual(['test 1', 'test 2']) |
||||
expect(output.pendingTransactionHashes.length).toEqual(1) |
||||
expect(output.internalTransactions).toEqual([ |
||||
{ internalTransactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('after disconnection', () => { |
||||
test('beyond page one', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
channelDisconnected: true |
||||
beyondPageOne: true, |
||||
internalTransactions: [{ internalTransactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { |
||||
transactionHash: '0x00', |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [{ internalTransactionHtml: 'test 2' }] |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newPendingTransactions).toEqual([]) |
||||
expect(output.pendingTransactionHashes).toEqual([]) |
||||
expect(output.internalTransactions).toEqual([ |
||||
{ internalTransactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('on page 2', () => { |
||||
test('with filtered out internal transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
beyondPageOne: true |
||||
filter: 'to' |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { |
||||
transactionHash: '0x00', |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', |
||||
msgs: [{ internalTransactionHtml: 'test 2' }] |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newPendingTransactions).toEqual([]) |
||||
expect(output.pendingTransactionHashes).toEqual([]) |
||||
expect(output.internalTransactions).toEqual([]) |
||||
}) |
||||
}) |
||||
|
||||
describe('RECEIVED_NEW_TRANSACTION', () => { |
||||
test('single transaction', () => { |
||||
describe('RECEIVED_NEW_PENDING_TRANSACTION', () => { |
||||
test('with new pending transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
addressHash: '0x111' |
||||
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([{ transactionHtml: 'test' }]) |
||||
expect(output.transactionCount).toEqual(null) |
||||
expect(output.pendingTransactions).toEqual([ |
||||
{ transactionHash: 2, transactionHtml: 'test 2' }, |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('single transaction after single transaction', () => { |
||||
test('when channel has been disconnected', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
newTransactions: [{ transactionHtml: 'test 1' }] |
||||
channelDisconnected: true, |
||||
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
transactionHtml: 'test 2' |
||||
} |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([ |
||||
{ transactionHtml: 'test 1' }, |
||||
{ transactionHtml: 'test 2' } |
||||
expect(output.pendingTransactions).toEqual([ |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('after disconnection', () => { |
||||
test('beyond page one', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
channelDisconnected: true |
||||
beyondPageOne: true, |
||||
pendingTransactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([]) |
||||
expect(output.pendingTransactions).toEqual([ |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('on page 2', () => { |
||||
test('with filtered out pending transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
beyondPageOne: true, |
||||
transactionCount: 1, |
||||
addressHash: '0x111' |
||||
filter: 'to' |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
transactionHtml: 'test' |
||||
} |
||||
type: 'RECEIVED_NEW_PENDING_TRANSACTION', |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([]) |
||||
expect(output.transactionCount).toEqual(1) |
||||
expect(output.pendingTransactions).toEqual([]) |
||||
}) |
||||
test('transaction from current address with "from" filter', () => { |
||||
}) |
||||
|
||||
describe('RECEIVED_NEW_TRANSACTION', () => { |
||||
test('with new transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
addressHash: '1234', |
||||
filter: 'from' |
||||
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }], |
||||
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
fromAddressHash: '1234', |
||||
transactionHtml: 'test' |
||||
} |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([ |
||||
{ fromAddressHash: '1234', transactionHtml: 'test' } |
||||
expect(output.pendingTransactions).toEqual([ |
||||
{ transactionHash: 2, transactionHtml: 'test 2', validated: true } |
||||
]) |
||||
expect(output.transactions).toEqual([ |
||||
{ transactionHash: 2, transactionHtml: 'test 2' }, |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('transaction from current address with "to" filter', () => { |
||||
test('when channel has been disconnected', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
addressHash: '1234', |
||||
filter: 'to' |
||||
channelDisconnected: true, |
||||
pendingTransactions: [{ transactionHash: 2, transactionHtml: 'test' }], |
||||
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
fromAddressHash: '1234', |
||||
transactionHtml: 'test' |
||||
} |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([]) |
||||
expect(output.pendingTransactions).toEqual([ |
||||
{ transactionHash: 2, transactionHtml: 'test' } |
||||
]) |
||||
expect(output.transactions).toEqual([ |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('transaction to current address with "to" filter', () => { |
||||
test('beyond page one', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
addressHash: '1234', |
||||
filter: 'to' |
||||
beyondPageOne: true, |
||||
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
toAddressHash: '1234', |
||||
transactionHtml: 'test' |
||||
} |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([ |
||||
{ toAddressHash: '1234', transactionHtml: 'test' } |
||||
expect(output.pendingTransactions).toEqual([]) |
||||
expect(output.transactions).toEqual([ |
||||
{ transactionHash: 1, transactionHtml: 'test 1' } |
||||
]) |
||||
}) |
||||
test('transaction to current address with "from" filter', () => { |
||||
test('with filtered out transaction', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
addressHash: '1234', |
||||
filter: 'from' |
||||
filter: 'to' |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
msg: { |
||||
toAddressHash: '1234', |
||||
transactionHtml: 'test' |
||||
} |
||||
msg: { transactionHash: 2, transactionHtml: 'test 2' } |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([]) |
||||
expect(output.transactions).toEqual([]) |
||||
}) |
||||
test('single transaction collated from pending', () => { |
||||
const state = initialState |
||||
}) |
||||
|
||||
describe('RECEIVED_NEXT_TRANSACTIONS_PAGE', () => { |
||||
test('with new transaction page', () => { |
||||
const state = Object.assign({}, initialState, { |
||||
loadingNextPage: true, |
||||
nextPageUrl: '1', |
||||
transactions: [{ transactionHash: 1, transactionHtml: 'test 1' }] |
||||
}) |
||||
const action = { |
||||
type: 'RECEIVED_NEW_TRANSACTION', |
||||
type: 'RECEIVED_NEXT_TRANSACTIONS_PAGE', |
||||
msg: { |
||||
transactionHash: '0x00', |
||||
transactionHtml: 'test' |
||||
nextPageUrl: '2', |
||||
transactions: [{ transactionHash: 2, transactionHtml: 'test 2' }] |
||||
} |
||||
} |
||||
const output = reducer(state, action) |
||||
|
||||
expect(output.newTransactions).toEqual([ |
||||
{ transactionHash: '0x00', transactionHtml: 'test' } |
||||
expect(output.loadingNextPage).toEqual(false) |
||||
expect(output.nextPageUrl).toEqual('2') |
||||
expect(output.transactions).toEqual([ |
||||
{ transactionHash: 1, transactionHtml: 'test 1' }, |
||||
{ transactionHash: 2, transactionHtml: 'test 2' } |
||||
]) |
||||
expect(output.transactionCount).toEqual(null) |
||||
}) |
||||
}) |
||||
|
@ -1,4 +1,4 @@ |
||||
import { reducer, initialState } from '../../js/pages/block' |
||||
import { reducer, initialState } from '../../js/pages/blocks' |
||||
|
||||
test('CHANNEL_DISCONNECTED', () => { |
||||
const state = initialState |
@ -0,0 +1,232 @@ |
||||
import { reducer, initialState } from '../../js/pages/pending_transactions' |
||||
|
||||
// CHANNEL_DISCONNECTED simply flips the disconnected flag on an untouched initial state.
test('CHANNEL_DISCONNECTED', () => {
  const result = reducer(initialState, { type: 'CHANNEL_DISCONNECTED' })

  expect(result.channelDisconnected).toBe(true)
})
||||
|
||||
describe('RECEIVED_NEW_PENDING_TRANSACTION_BATCH', () => {
  // Builds a batch action whose messages are numbered start .. start + count - 1,
  // producing the same '0xNN' / 'test NN' fixtures this suite uses throughout.
  const numberedBatch = (start, count) => ({
    type: 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH',
    msgs: Array.from({ length: count }, (_item, i) => ({
      transactionHash: `0x${String(start + i).padStart(2, '0')}`,
      transactionHtml: `test ${start + i}`
    }))
  })

  test('single transaction', () => {
    const action = {
      type: 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH',
      msgs: [{ transactionHash: '0x00', transactionHtml: 'test' }]
    }

    const result = reducer(initialState, action)

    // A small batch is rendered immediately, never accumulated.
    expect(result.newPendingTransactions).toEqual(['test'])
    expect(result.newPendingTransactionHashesBatch.length).toEqual(0)
    expect(result.pendingTransactionCount).toEqual(1)
  })
  test('large batch of transactions', () => {
    const result = reducer(initialState, numberedBatch(1, 11))

    // 11 messages crosses the batch threshold: hashes accumulate, no rows render.
    expect(result.newPendingTransactions).toEqual([])
    expect(result.newPendingTransactionHashesBatch.length).toEqual(11)
    expect(result.pendingTransactionCount).toEqual(11)
  })
  test('single transaction after single transaction', () => {
    const prevState = Object.assign({}, initialState, {
      newPendingTransactions: ['test 1'],
      pendingTransactionCount: 1
    })

    const result = reducer(prevState, numberedBatch(2, 1))

    expect(result.newPendingTransactions).toEqual(['test 1', 'test 2'])
    expect(result.newPendingTransactionHashesBatch.length).toEqual(0)
    expect(result.pendingTransactionCount).toEqual(2)
  })
  test('single transaction after large batch of transactions', () => {
    const prevState = Object.assign({}, initialState, {
      newPendingTransactionHashesBatch: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11']
    })

    const result = reducer(prevState, numberedBatch(12, 1))

    // Once batching has started, even a single message stays in the batch.
    expect(result.newPendingTransactions).toEqual([])
    expect(result.newPendingTransactionHashesBatch.length).toEqual(12)
  })
  test('large batch of transactions after large batch of transactions', () => {
    const prevState = Object.assign({}, initialState, {
      newPendingTransactionHashesBatch: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11']
    })

    const result = reducer(prevState, numberedBatch(12, 11))

    expect(result.newPendingTransactions).toEqual([])
    expect(result.newPendingTransactionHashesBatch.length).toEqual(22)
  })
  test('after disconnection', () => {
    const prevState = Object.assign({}, initialState, { channelDisconnected: true })
    const action = {
      type: 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH',
      msgs: [{ transactionHash: '0x00', transactionHtml: 'test' }]
    }

    const result = reducer(prevState, action)

    // A disconnected channel freezes live updates entirely.
    expect(result.newPendingTransactions).toEqual([])
  })
  test('on page 2+', () => {
    const prevState = Object.assign({}, initialState, {
      beyondPageOne: true,
      pendingTransactionCount: 1
    })
    const action = {
      type: 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH',
      msgs: [{ transactionHash: '0x00', transactionHtml: 'test' }]
    }

    const result = reducer(prevState, action)

    // Beyond page one only the header count updates; no rows are inserted.
    expect(result.newPendingTransactions).toEqual([])
    expect(result.pendingTransactionCount).toEqual(2)
  })
})
||||
|
||||
describe('RECEIVED_NEW_TRANSACTION', () => {
  // A collation event: a previously-pending transaction was mined.
  const collate = (hash) => ({ type: 'RECEIVED_NEW_TRANSACTION', msg: { transactionHash: hash } })

  test('single transaction collated', () => {
    const prevState = Object.assign({}, initialState, { pendingTransactionCount: 2 })

    const result = reducer(prevState, collate('0x00'))

    expect(result.pendingTransactionCount).toBe(1)
    expect(result.newTransactionHashes).toEqual(['0x00'])
  })
  test('single transaction collated after batch', () => {
    const prevState = Object.assign({}, initialState, {
      newPendingTransactionHashesBatch: ['0x01', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11']
    })

    const result = reducer(prevState, collate('0x01'))

    // The mined hash is removed from the pending batch accumulator.
    expect(result.newPendingTransactionHashesBatch.length).toEqual(10)
    expect(result.newPendingTransactionHashesBatch).not.toContain('0x01')
  })
  test('on page 2+', () => {
    const prevState = Object.assign({}, initialState, {
      beyondPageOne: true,
      pendingTransactionCount: 2
    })

    const result = reducer(prevState, collate('0x01'))

    expect(result.pendingTransactionCount).toEqual(1)
  })
})
@ -0,0 +1,160 @@ |
||||
import { reducer, initialState } from '../../js/pages/transactions' |
||||
|
||||
// CHANNEL_DISCONNECTED marks the channel as down and drops any batched count.
test('CHANNEL_DISCONNECTED', () => {
  const result = reducer(initialState, { type: 'CHANNEL_DISCONNECTED' })

  expect(result.channelDisconnected).toBe(true)
  expect(result.batchCountAccumulator).toBe(0)
})
||||
|
||||
describe('RECEIVED_NEW_TRANSACTION_BATCH', () => {
  // Builds a batch action whose rows render as `test N` for N in start .. start + count - 1.
  const numberedBatch = (start, count) => ({
    type: 'RECEIVED_NEW_TRANSACTION_BATCH',
    msgs: Array.from({ length: count }, (_item, i) => ({ transactionHtml: `test ${start + i}` }))
  })

  test('single transaction', () => {
    const action = {
      type: 'RECEIVED_NEW_TRANSACTION_BATCH',
      msgs: [{ transactionHtml: 'test' }]
    }

    const result = reducer(initialState, action)

    // Small batches render immediately instead of accumulating a count.
    expect(result.newTransactions).toEqual(['test'])
    expect(result.batchCountAccumulator).toEqual(0)
    expect(result.transactionCount).toEqual(1)
  })
  test('large batch of transactions', () => {
    const result = reducer(initialState, numberedBatch(1, 11))

    // 11 messages crosses the batch threshold: only the accumulator grows.
    expect(result.newTransactions).toEqual([])
    expect(result.batchCountAccumulator).toEqual(11)
    expect(result.transactionCount).toEqual(11)
  })
  test('single transaction after single transaction', () => {
    const prevState = Object.assign({}, initialState, { newTransactions: ['test 1'] })

    const result = reducer(prevState, numberedBatch(2, 1))

    expect(result.newTransactions).toEqual(['test 1', 'test 2'])
    expect(result.batchCountAccumulator).toEqual(0)
  })
  test('single transaction after large batch of transactions', () => {
    const prevState = Object.assign({}, initialState, { batchCountAccumulator: 11 })

    const result = reducer(prevState, numberedBatch(12, 1))

    // Once in batch mode, even single messages keep accumulating.
    expect(result.newTransactions).toEqual([])
    expect(result.batchCountAccumulator).toEqual(12)
  })
  test('large batch of transactions after large batch of transactions', () => {
    const prevState = Object.assign({}, initialState, { batchCountAccumulator: 11 })

    const result = reducer(prevState, numberedBatch(12, 11))

    expect(result.newTransactions).toEqual([])
    expect(result.batchCountAccumulator).toEqual(22)
  })
  test('after disconnection', () => {
    const prevState = Object.assign({}, initialState, { channelDisconnected: true })

    const result = reducer(prevState, { type: 'RECEIVED_NEW_TRANSACTION_BATCH', msgs: [{ transactionHtml: 'test' }] })

    expect(result.newTransactions).toEqual([])
    expect(result.batchCountAccumulator).toEqual(0)
  })
  test('on page 2+', () => {
    const prevState = Object.assign({}, initialState, {
      beyondPageOne: true,
      transactionCount: 1
    })

    const result = reducer(prevState, { type: 'RECEIVED_NEW_TRANSACTION_BATCH', msgs: [{ transactionHtml: 'test' }] })

    // Beyond page one only the total count updates; nothing is rendered or batched.
    expect(result.newTransactions).toEqual([])
    expect(result.batchCountAccumulator).toEqual(0)
    expect(result.transactionCount).toEqual(2)
  })
})
@ -0,0 +1,131 @@ |
||||
import $ from 'jquery' |
||||
import _ from 'lodash' |
||||
import URI from 'urijs' |
||||
import humps from 'humps' |
||||
import numeral from 'numeral' |
||||
import socket from '../socket' |
||||
import { updateAllAges } from '../lib/from_now' |
||||
import { batchChannel, initRedux, slideDownPrepend, slideUpRemove } from '../utils' |
||||
|
||||
const BATCH_THRESHOLD = 10 |
||||
|
||||
// Initial client-side state for the pending-transactions page (keys alphabetized).
export const initialState = {
  // true when the page was reached via pagination query params (page 2+)
  beyondPageOne: null,
  // set once the socket channel drops; freezes live updates
  channelDisconnected: false,
  // hashes accumulated while batching instead of rendering each row
  newPendingTransactionHashesBatch: [],
  // rendered HTML rows waiting to be prepended to the table
  newPendingTransactions: [],
  // hashes of pending transactions that have just been mined
  newTransactionHashes: [],
  // header count; null until PAGE_LOAD parses it from the DOM
  pendingTransactionCount: null
}
||||
|
||||
/**
 * Reducer for the pending-transactions page.
 *
 * Handled actions:
 *  - PAGE_LOAD: records pagination position and the numeral-parsed header count.
 *  - CHANNEL_DISCONNECTED: latches the disconnected flag; later live actions no-op.
 *  - RECEIVED_NEW_TRANSACTION: a pending transaction was mined — remove its hash
 *    from any in-progress batch, decrement the count, remember the mined hash.
 *  - RECEIVED_NEW_PENDING_TRANSACTION_BATCH: render small batches immediately;
 *    once a batch is in progress (or a batch meets BATCH_THRESHOLD) only hashes
 *    accumulate, so the DOM is not flooded.
 *
 * @param {Object} state  current page state (defaults to initialState)
 * @param {Object} action dispatched action with a `type` discriminator
 * @returns {Object} the next state; the incoming state object is never mutated
 */
export function reducer (state = initialState, action) {
  if (action.type === 'PAGE_LOAD') {
    return Object.assign({}, state, {
      beyondPageOne: action.beyondPageOne,
      pendingTransactionCount: numeral(action.pendingTransactionCount).value()
    })
  }

  if (action.type === 'CHANNEL_DISCONNECTED') {
    return Object.assign({}, state, { channelDisconnected: true })
  }

  if (action.type === 'RECEIVED_NEW_TRANSACTION') {
    // Ignore live updates once the channel has dropped.
    if (state.channelDisconnected) return state

    const minedHash = action.msg.transactionHash
    return Object.assign({}, state, {
      // The mined transaction is no longer pending, so drop it from the batch.
      newPendingTransactionHashesBatch: state.newPendingTransactionHashesBatch.filter((hash) => hash !== minedHash),
      pendingTransactionCount: state.pendingTransactionCount - 1,
      newTransactionHashes: [minedHash]
    })
  }

  if (action.type === 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH') {
    if (state.channelDisconnected) return state

    const pendingTransactionCount = state.pendingTransactionCount + action.msgs.length

    // Beyond page one only the header count is kept current.
    if (state.beyondPageOne) {
      return Object.assign({}, state, { pendingTransactionCount })
    }

    // Accumulate hashes instead of rendering when batching already started
    // or this burst alone is large enough to be disruptive.
    const shouldBatch =
      state.newPendingTransactionHashesBatch.length > 0 ||
      action.msgs.length >= BATCH_THRESHOLD
    if (shouldBatch) {
      return Object.assign({}, state, {
        newPendingTransactionHashesBatch: [
          ...state.newPendingTransactionHashesBatch,
          ...action.msgs.map((msg) => msg.transactionHash)
        ],
        pendingTransactionCount
      })
    }

    return Object.assign({}, state, {
      newPendingTransactions: [
        ...state.newPendingTransactions,
        ...action.msgs.map((msg) => msg.transactionHtml)
      ],
      pendingTransactionCount
    })
  }

  return state
}
||||
|
||||
// Wire the reducer to the DOM only on the pending-transaction list page.
const $transactionPendingListPage = $('[data-page="transaction-pending-list"]')
if ($transactionPendingListPage.length) {
  initRedux(reducer, {
    main (store) {
      // Seed the store from server-rendered markup and the current URL.
      store.dispatch({
        type: 'PAGE_LOAD',
        pendingTransactionCount: $('[data-selector="transaction-pending-count"]').text(),
        // Presence of an `inserted_at` query param means we are beyond page one.
        beyondPageOne: !!humps.camelizeKeys(URI(window.location).query(true)).insertedAt
      })
      // Channel delivering transactions that have been confirmed.
      const transactionsChannel = socket.channel(`transactions:new_transaction`)
      transactionsChannel.join()
      transactionsChannel.onError(() => store.dispatch({ type: 'CHANNEL_DISCONNECTED' }))
      transactionsChannel.on('transaction', (msg) =>
        store.dispatch({ type: 'RECEIVED_NEW_TRANSACTION', msg: humps.camelizeKeys(msg) })
      )
      // Channel delivering newly created pending transactions; messages are
      // grouped by batchChannel before a single batch action is dispatched.
      const pendingTransactionsChannel = socket.channel(`transactions:new_pending_transaction`)
      pendingTransactionsChannel.join()
      pendingTransactionsChannel.onError(() => store.dispatch({ type: 'CHANNEL_DISCONNECTED' }))
      pendingTransactionsChannel.on('pending_transaction', batchChannel((msgs) =>
        store.dispatch({ type: 'RECEIVED_NEW_PENDING_TRANSACTION_BATCH', msgs: humps.camelizeKeys(msgs) }))
      )
    },
    render (state, oldState) {
      const $channelBatching = $('[data-selector="channel-batching-message"]')
      const $channelBatchingCount = $('[data-selector="channel-batching-count"]')
      const $channelDisconnected = $('[data-selector="channel-disconnected-message"]')
      const $pendingTransactionsList = $('[data-selector="transactions-pending-list"]')
      const $pendingTransactionsCount = $('[data-selector="transaction-pending-count"]')

      if (state.channelDisconnected) $channelDisconnected.show()
      if (oldState.pendingTransactionCount !== state.pendingTransactionCount) {
        $pendingTransactionsCount.empty().append(numeral(state.pendingTransactionCount).format())
      }
      // A pending transaction was confirmed: animate its row out of the list.
      if (oldState.newTransactionHashes !== state.newTransactionHashes && state.newTransactionHashes.length > 0) {
        const $transaction = $(`[data-transaction-hash="${state.newTransactionHashes[0]}"]`)
        $transaction.addClass('shrink-out')
        setTimeout(() => {
          // If the last row on this page vanished but pending transactions
          // remain, navigate back to the first page instead of showing an
          // empty list.
          if ($transaction.length === 1 && $transaction.siblings().length === 0 && state.pendingTransactionCount > 0) {
            window.location.href = URI(window.location).removeQuery('inserted_at').removeQuery('hash').toString()
          } else {
            slideUpRemove($transaction)
          }
        }, 400)
      }
      // Show/hide the "N transactions batched" banner.
      if (state.newPendingTransactionHashesBatch.length) {
        $channelBatching.show()
        $channelBatchingCount[0].innerHTML = numeral(state.newPendingTransactionHashesBatch.length).format()
      } else {
        $channelBatching.hide()
      }
      if (oldState.newPendingTransactions !== state.newPendingTransactions) {
        // Only rows added since the previous render are prepended.
        const newTransactionsToInsert = state.newPendingTransactions.slice(oldState.newPendingTransactions.length)
        slideDownPrepend($pendingTransactionsList, newTransactionsToInsert.reverse().join(''))

        updateAllAges()
      }
    }
  })
}
@ -0,0 +1,100 @@ |
||||
import $ from 'jquery' |
||||
import _ from 'lodash' |
||||
import URI from 'urijs' |
||||
import humps from 'humps' |
||||
import numeral from 'numeral' |
||||
import socket from '../socket' |
||||
import { updateAllAges } from '../lib/from_now' |
||||
import { batchChannel, initRedux, slideDownPrepend } from '../utils' |
||||
|
||||
const BATCH_THRESHOLD = 10 |
||||
|
||||
// Initial Redux state for the transaction list page.
export const initialState = {
  // Number of transactions received while batching (shown as a counter
  // instead of rendering every row).
  batchCountAccumulator: 0,
  // Set on PAGE_LOAD; true when the user has paginated past the first page.
  beyondPageOne: null,
  // Flipped to true when the socket channel errors; later messages are ignored.
  channelDisconnected: false,
  // Rendered HTML rows for newly received transactions.
  newTransactions: [],
  // Total transaction count; parsed from page text on PAGE_LOAD.
  transactionCount: null
}
||||
|
||||
// Reducer for the transaction list page. Pure: returns a new state object
// built with Object.assign (or the untouched input for ignored actions);
// the incoming `state` is never mutated.
export function reducer (state = initialState, action) {
  if (action.type === 'PAGE_LOAD') {
    return Object.assign({}, state, {
      beyondPageOne: action.beyondPageOne,
      // The count arrives as display text; numeral parses it into a number.
      transactionCount: numeral(action.transactionCount).value()
    })
  }

  if (action.type === 'CHANNEL_DISCONNECTED') {
    // Reset the batch counter so the batching banner is not left showing.
    return Object.assign({}, state, {
      channelDisconnected: true,
      batchCountAccumulator: 0
    })
  }

  // After a disconnect, late-arriving socket messages are ignored.
  if (state.channelDisconnected) return state

  if (action.type === 'RECEIVED_NEW_TRANSACTION_BATCH') {
    const transactionCount = state.transactionCount + action.msgs.length

    // Past page one, only the counter is kept in sync; no rows are rendered.
    if (state.beyondPageOne) return Object.assign({}, state, { transactionCount })

    const batching = state.batchCountAccumulator > 0 || action.msgs.length >= BATCH_THRESHOLD
    if (batching) {
      // Too many rows to animate in: just count them.
      return Object.assign({}, state, {
        batchCountAccumulator: state.batchCountAccumulator + action.msgs.length,
        transactionCount
      })
    }

    // Small batch and no backlog: queue the rendered rows for insertion.
    return Object.assign({}, state, {
      newTransactions: state.newTransactions.concat(
        action.msgs.map((msg) => msg.transactionHtml)
      ),
      transactionCount
    })
  }

  return state
}
||||
|
||||
// Wire the reducer to the DOM only on the transaction list page.
const $transactionListPage = $('[data-page="transaction-list"]')
if ($transactionListPage.length) {
  initRedux(reducer, {
    main (store) {
      // Seed the store from server-rendered markup and the current URL.
      store.dispatch({
        type: 'PAGE_LOAD',
        transactionCount: $('[data-selector="transaction-count"]').text(),
        // Presence of an `index` query param means we are beyond page one.
        beyondPageOne: !!humps.camelizeKeys(URI(window.location).query(true)).index
      })
      // New confirmed transactions arrive on this channel; batchChannel
      // groups them before a single batch action is dispatched.
      const transactionsChannel = socket.channel(`transactions:new_transaction`)
      transactionsChannel.join()
      transactionsChannel.onError(() => store.dispatch({ type: 'CHANNEL_DISCONNECTED' }))
      transactionsChannel.on('transaction', batchChannel((msgs) =>
        store.dispatch({ type: 'RECEIVED_NEW_TRANSACTION_BATCH', msgs: humps.camelizeKeys(msgs) }))
      )
    },
    render (state, oldState) {
      const $channelBatching = $('[data-selector="channel-batching-message"]')
      const $channelBatchingCount = $('[data-selector="channel-batching-count"]')
      const $channelDisconnected = $('[data-selector="channel-disconnected-message"]')
      const $transactionsList = $('[data-selector="transactions-list"]')
      const $transactionCount = $('[data-selector="transaction-count"]')

      if (state.channelDisconnected) $channelDisconnected.show()
      if (oldState.transactionCount !== state.transactionCount) $transactionCount.empty().append(numeral(state.transactionCount).format())
      // Show/hide the "N transactions batched" banner.
      if (state.batchCountAccumulator) {
        $channelBatching.show()
        $channelBatchingCount[0].innerHTML = numeral(state.batchCountAccumulator).format()
      } else {
        $channelBatching.hide()
      }
      if (oldState.newTransactions !== state.newTransactions) {
        // Only rows added since the previous render are prepended.
        const newTransactionsToInsert = state.newTransactions.slice(oldState.newTransactions.length)
        slideDownPrepend($transactionsList, newTransactionsToInsert.reverse().join(''))

        updateAllAges()
      }
    }
  })
}
@ -0,0 +1,12 @@ |
||||
defmodule BlockScoutWeb.Resolvers.Address do
  @moduledoc false

  alias Explorer.Chain

  # Resolves the `addresses` GraphQL field: looks up every hash in `hashes`
  # and returns `{:ok, addresses}`, or `{:error, _}` when none were found.
  def get_by(_, %{hashes: hashes}, _) do
    hashes
    |> Chain.hashes_to_addresses()
    |> case do
      [] -> {:error, "Addresses not found."}
      addresses -> {:ok, addresses}
    end
  end
end
@ -0,0 +1,142 @@ |
||||
defmodule BlockScoutWeb.Schema.Query.AddressTest do
  use BlockScoutWeb.ConnCase

  describe "address field" do
    test "with valid argument 'hashes', returns all expected fields", %{conn: conn} do
      address = insert(:address, fetched_coin_balance: 100)

      query = """
      query ($hashes: [AddressHash!]!) {
        addresses(hashes: $hashes) {
          hash
          fetched_coin_balance
          fetched_coin_balance_block_number
          contract_code
        }
      }
      """

      variables = %{"hashes" => to_string(address.hash)}

      conn = get(conn, "/graphql", query: query, variables: variables)

      # Balances serialize as strings; a plain (non-contract) address has a
      # nil contract_code.
      assert json_response(conn, 200) == %{
               "data" => %{
                 "addresses" => [
                   %{
                     "hash" => to_string(address.hash),
                     "fetched_coin_balance" => to_string(address.fetched_coin_balance.value),
                     "fetched_coin_balance_block_number" => address.fetched_coin_balance_block_number,
                     "contract_code" => nil
                   }
                 ]
               }
             }
    end

    test "with contract address, `contract_code` is serialized as expected", %{conn: conn} do
      address = insert(:contract_address, fetched_coin_balance: 100)

      query = """
      query ($hashes: [AddressHash!]!) {
        addresses(hashes: $hashes) {
          contract_code
        }
      }
      """

      variables = %{"hashes" => to_string(address.hash)}

      conn = get(conn, "/graphql", query: query, variables: variables)

      assert json_response(conn, 200) == %{
               "data" => %{
                 "addresses" => [
                   %{
                     "contract_code" => to_string(address.contract_code)
                   }
                 ]
               }
             }
    end

    test "errors for non-existent address hashes", %{conn: conn} do
      # `build/1` creates the struct without persisting it, so the hash is
      # guaranteed not to exist in the database.
      address = build(:address)

      query = """
      query ($hashes: [AddressHash!]!) {
        addresses(hashes: $hashes) {
          fetched_coin_balance
        }
      }
      """

      variables = %{"hashes" => [to_string(address.hash)]}

      conn = get(conn, "/graphql", query: query, variables: variables)

      assert %{"errors" => [error]} = json_response(conn, 200)
      assert error["message"] =~ ~s(Addresses not found.)
    end

    test "errors if argument 'hashes' is missing", %{conn: conn} do
      query = """
      query {
        addresses {
          fetched_coin_balance
        }
      }
      """

      variables = %{}

      conn = get(conn, "/graphql", query: query, variables: variables)

      assert %{"errors" => [error]} = json_response(conn, 200)
      assert error["message"] == ~s(In argument "hashes": Expected type "[AddressHash!]!", found null.)
    end

    test "errors if argument 'hashes' is not a list of address hashes", %{conn: conn} do
      query = """
      query ($hashes: [AddressHash!]!) {
        addresses(hashes: $hashes) {
          fetched_coin_balance
        }
      }
      """

      variables = %{"hashes" => ["someInvalidHash"]}

      conn = get(conn, "/graphql", query: query, variables: variables)

      assert %{"errors" => [error]} = json_response(conn, 200)
      assert error["message"] =~ ~s(Argument "hashes" has invalid value)
    end

    test "correlates complexity to size of 'hashes' argument", %{conn: conn} do
      # max of 12 addresses with four fields of complexity 1 can be fetched
      # per query:
      # 12 * 4 = 48, which is less than a max complexity of 50
      hashes = 13 |> build_list(:address) |> Enum.map(&to_string(&1.hash))

      query = """
      query ($hashes: [AddressHash!]!) {
        addresses(hashes: $hashes) {
          hash
          fetched_coin_balance
          fetched_coin_balance_block_number
          contract_code
        }
      }
      """

      variables = %{"hashes" => hashes}

      conn = get(conn, "/graphql", query: query, variables: variables)

      assert %{"errors" => [error1, error2]} = json_response(conn, 200)
      assert error1["message"] =~ ~s(Field addresses is too complex)
      assert error2["message"] =~ ~s(Operation is too complex)
    end
  end
end
@ -0,0 +1,108 @@ |
||||
defmodule Explorer.Chain.Address.CurrentTokenBalance do
  @moduledoc """
  Represents the current token balance from addresses according to the last block.
  """

  use Ecto.Schema
  import Ecto.Changeset
  import Ecto.Query, only: [from: 2, limit: 2, order_by: 3, preload: 2, where: 3]

  alias Explorer.{Chain, PagingOptions}
  alias Explorer.Chain.{Address, Block, Hash, Token}

  @default_paging_options %PagingOptions{page_size: 50}

  @typedoc """
  * `address` - The `t:Explorer.Chain.Address.t/0` that is the balance's owner.
  * `address_hash` - The address hash foreign key.
  * `token` - The `t:Explorer.Chain.Token/0` so that the address has the balance.
  * `token_contract_address_hash` - The contract address hash foreign key.
  * `block_number` - The block's number that the transfer took place.
  * `value` - The value that's represents the balance.
  """
  @type t :: %__MODULE__{
          address: %Ecto.Association.NotLoaded{} | Address.t(),
          address_hash: Hash.Address.t(),
          token: %Ecto.Association.NotLoaded{} | Token.t(),
          # Fixed: was bare `Hash.Address` (a module, not a type).
          token_contract_address_hash: Hash.Address.t(),
          block_number: Block.block_number(),
          inserted_at: DateTime.t(),
          updated_at: DateTime.t(),
          value: Decimal.t() | nil
        }

  schema "address_current_token_balances" do
    field(:value, :decimal)
    field(:block_number, :integer)
    field(:value_fetched_at, :utc_datetime)

    belongs_to(:address, Address, foreign_key: :address_hash, references: :hash, type: Hash.Address)

    belongs_to(
      :token,
      Token,
      foreign_key: :token_contract_address_hash,
      references: :contract_address_hash,
      type: Hash.Address
    )

    timestamps()
  end

  @optional_fields ~w(value value_fetched_at)a
  @required_fields ~w(address_hash block_number token_contract_address_hash)a
  @allowed_fields @optional_fields ++ @required_fields

  @doc false
  def changeset(%__MODULE__{} = token_balance, attrs) do
    token_balance
    |> cast(attrs, @allowed_fields)
    |> validate_required(@required_fields)
    |> foreign_key_constraint(:address_hash)
    |> foreign_key_constraint(:token_contract_address_hash)
  end

  # Resolved at compile time so the burn address can be pinned in queries.
  {:ok, burn_address_hash} = Chain.string_to_address_hash("0x0000000000000000000000000000000000000000")
  @burn_address_hash burn_address_hash

  @doc """
  Builds an `Ecto.Query` to fetch the token holders from the given token contract address hash.

  The Token Holders are the addresses that own a positive amount of the Token. So this query is
  considering the following conditions:

  * The token balance from the last block.
  * Balances greater than 0.
  * Excluding the burn address (0x0000000000000000000000000000000000000000).

  """
  def token_holders_ordered_by_value(token_contract_address_hash, options \\ []) do
    paging_options = Keyword.get(options, :paging_options, @default_paging_options)

    token_contract_address_hash
    |> token_holders_query()
    |> preload(:address)
    # Fix: order by `address_hash` as a tie-breaker. The keyset pagination
    # below keys on `{value, address_hash}`, so ordering by value alone left
    # equal-value rows in unspecified order and pages could skip or repeat
    # holders.
    |> order_by([tb], desc: tb.value, desc: tb.address_hash)
    |> page_token_balances(paging_options)
    |> limit(^paging_options.page_size)
  end

  # Base query: balances of the given token, excluding the burn address and
  # zero/nil balances.
  defp token_holders_query(token_contract_address_hash) do
    from(
      tb in __MODULE__,
      where: tb.token_contract_address_hash == ^token_contract_address_hash,
      where: tb.address_hash != ^@burn_address_hash,
      where: tb.value > 0
    )
  end

  # First page: no keyset condition to apply.
  defp page_token_balances(query, %PagingOptions{key: nil}), do: query

  # Subsequent pages: rows strictly after `{value, address_hash}` in the
  # `value desc, address_hash desc` ordering.
  defp page_token_balances(query, %PagingOptions{key: {value, address_hash}}) do
    where(
      query,
      [tb],
      tb.value < ^value or (tb.value == ^value and tb.address_hash < ^address_hash)
    )
  end
end
@ -0,0 +1,124 @@ |
||||
defmodule Explorer.Chain.Import.Address.CurrentTokenBalances do
  @moduledoc """
  Bulk imports `t:Explorer.Chain.Address.CurrentTokenBalance.t/0`.
  """

  require Ecto.Query

  import Ecto.Query, only: [from: 2]

  alias Ecto.{Changeset, Multi}
  alias Explorer.Chain.Address.CurrentTokenBalance
  alias Explorer.Chain.Import

  @behaviour Import.Runner

  # milliseconds
  @timeout 60_000

  @type imported :: [CurrentTokenBalance.t()]

  @impl Import.Runner
  def ecto_schema_module, do: CurrentTokenBalance

  @impl Import.Runner
  def option_key, do: :address_current_token_balances

  @impl Import.Runner
  def imported_table_row do
    %{
      value_type: "[#{ecto_schema_module()}.t()]",
      value_description: "List of `t:#{ecto_schema_module()}.t/0`s"
    }
  end

  # Appends this runner's insert step to the import `Multi`. Only the
  # `on_conflict` and `timeout` options are honored from the caller's
  # per-runner options; `timestamps` always comes from the import itself.
  @impl Import.Runner
  def run(multi, changes_list, %{timestamps: timestamps} = options) do
    insert_options =
      options
      |> Map.get(option_key(), %{})
      |> Map.take(~w(on_conflict timeout)a)
      |> Map.put_new(:timeout, @timeout)
      |> Map.put(:timestamps, timestamps)

    Multi.run(multi, :address_current_token_balances, fn _ ->
      insert(changes_list, insert_options)
    end)
  end

  @impl Import.Runner
  def timeout, do: @timeout

  @spec insert([map()], %{
          optional(:on_conflict) => Import.Runner.on_conflict(),
          required(:timeout) => timeout(),
          required(:timestamps) => Import.timestamps()
        }) ::
          {:ok, [CurrentTokenBalance.t()]}
          | {:error, [Changeset.t()]}
  def insert(changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
    on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)

    {:ok, _} =
      Import.insert_changes_list(
        unique_token_balances(changes_list),
        conflict_target: ~w(address_hash token_contract_address_hash)a,
        on_conflict: on_conflict,
        for: CurrentTokenBalance,
        returning: true,
        timeout: timeout,
        timestamps: timestamps
      )
  end

  # Remove duplicated token balances based on `{address_hash, token_hash}` considering the last block
  # to avoid `cardinality_violation` error in Postgres. This error happens when there are duplicated
  # rows being inserted.
  defp unique_token_balances(changes_list) do
    changes_list
    # Highest block first, so uniq_by keeps the most recent row per pair.
    |> Enum.sort(&(&1.block_number > &2.block_number))
    |> Enum.uniq_by(fn %{address_hash: address_hash, token_contract_address_hash: token_hash} ->
      {address_hash, token_hash}
    end)
  end

  # Upsert rule: each column is overwritten only when the incoming row
  # (Postgres's EXCLUDED) has a newer block_number than the stored row;
  # otherwise the stored value is kept.
  defp default_on_conflict do
    from(
      current_token_balance in CurrentTokenBalance,
      update: [
        set: [
          block_number:
            fragment(
              "CASE WHEN EXCLUDED.block_number > ? THEN EXCLUDED.block_number ELSE ? END",
              current_token_balance.block_number,
              current_token_balance.block_number
            ),
          inserted_at:
            fragment(
              "CASE WHEN EXCLUDED.block_number > ? THEN EXCLUDED.inserted_at ELSE ? END",
              current_token_balance.block_number,
              current_token_balance.inserted_at
            ),
          updated_at:
            fragment(
              "CASE WHEN EXCLUDED.block_number > ? THEN EXCLUDED.updated_at ELSE ? END",
              current_token_balance.block_number,
              current_token_balance.updated_at
            ),
          value:
            fragment(
              "CASE WHEN EXCLUDED.block_number > ? THEN EXCLUDED.value ELSE ? END",
              current_token_balance.block_number,
              current_token_balance.value
            ),
          value_fetched_at:
            fragment(
              "CASE WHEN EXCLUDED.block_number > ? THEN EXCLUDED.value_fetched_at ELSE ? END",
              current_token_balance.block_number,
              current_token_balance.value_fetched_at
            )
        ]
      ]
    )
  end
end
@ -0,0 +1,32 @@ |
||||
defmodule Explorer.Repo.Migrations.CreateAddressCurrentTokenBalances do
  use Ecto.Migration

  def change do
    create table(:address_current_token_balances) do
      add(:address_hash, references(:addresses, column: :hash, type: :bytea), null: false)
      add(:block_number, :bigint, null: false)

      add(
        :token_contract_address_hash,
        references(:tokens, column: :contract_address_hash, type: :bytea),
        null: false
      )

      # `value` stays NULL until fetched; `value_fetched_at` records when the
      # fetch happened.
      add(:value, :decimal, null: true)
      add(:value_fetched_at, :utc_datetime, default: fragment("NULL"), null: true)

      timestamps(null: false, type: :utc_datetime)
    end

    # At most one current balance per {address, token} pair.
    create(unique_index(:address_current_token_balances, ~w(address_hash token_contract_address_hash)a))

    # Partial index over fetched (non-NULL) balances.
    create(
      index(
        :address_current_token_balances,
        [:value],
        name: :address_current_token_balances_value,
        where: "value IS NOT NULL"
      )
    )
  end
end
@ -0,0 +1,149 @@ |
||||
defmodule Explorer.Chain.Address.CurrentTokenBalanceTest do
  use Explorer.DataCase

  alias Explorer.{Chain, PagingOptions, Repo}
  alias Explorer.Chain.Token
  alias Explorer.Chain.Address.CurrentTokenBalance

  describe "token_holders_ordered_by_value/2" do
    test "returns the last value for each address" do
      %Token{contract_address_hash: contract_address_hash} = insert(:token)
      address_a = insert(:address)
      address_b = insert(:address)

      insert(
        :address_current_token_balance,
        address: address_a,
        token_contract_address_hash: contract_address_hash,
        value: 5000
      )

      insert(
        :address_current_token_balance,
        address: address_b,
        block_number: 1001,
        token_contract_address_hash: contract_address_hash,
        value: 4000
      )

      # One row per holder is expected, regardless of block number.
      token_holders_count =
        contract_address_hash
        |> CurrentTokenBalance.token_holders_ordered_by_value()
        |> Repo.all()
        |> Enum.count()

      assert token_holders_count == 2
    end

    test "sort by the highest value" do
      %Token{contract_address_hash: contract_address_hash} = insert(:token)
      address_a = insert(:address)
      address_b = insert(:address)
      address_c = insert(:address)

      insert(
        :address_current_token_balance,
        address: address_a,
        token_contract_address_hash: contract_address_hash,
        value: 5000
      )

      insert(
        :address_current_token_balance,
        address: address_b,
        token_contract_address_hash: contract_address_hash,
        value: 4000
      )

      insert(
        :address_current_token_balance,
        address: address_c,
        token_contract_address_hash: contract_address_hash,
        value: 15000
      )

      token_holders_values =
        contract_address_hash
        |> CurrentTokenBalance.token_holders_ordered_by_value()
        |> Repo.all()
        |> Enum.map(&Decimal.to_integer(&1.value))

      # Descending order by balance.
      assert token_holders_values == [15_000, 5_000, 4_000]
    end

    test "returns only token balances that have value greater than 0" do
      %Token{contract_address_hash: contract_address_hash} = insert(:token)

      insert(
        :address_current_token_balance,
        token_contract_address_hash: contract_address_hash,
        value: 0
      )

      result =
        contract_address_hash
        |> CurrentTokenBalance.token_holders_ordered_by_value()
        |> Repo.all()

      assert result == []
    end

    test "ignores the burn address" do
      {:ok, burn_address_hash} = Chain.string_to_address_hash("0x0000000000000000000000000000000000000000")

      burn_address = insert(:address, hash: burn_address_hash)

      %Token{contract_address_hash: contract_address_hash} = insert(:token)

      insert(
        :address_current_token_balance,
        address: burn_address,
        token_contract_address_hash: contract_address_hash,
        value: 1000
      )

      result =
        contract_address_hash
        |> CurrentTokenBalance.token_holders_ordered_by_value()
        |> Repo.all()

      assert result == []
    end

    test "paginates the result by value and different address" do
      address_a = build(:address, hash: "0xcb2cf1fd3199584ac5faa16c6aca49472dc6495a")
      address_b = build(:address, hash: "0x5f26097334b6a32b7951df61fd0c5803ec5d8354")

      %Token{contract_address_hash: contract_address_hash} = insert(:token)

      first_page =
        insert(
          :address_current_token_balance,
          address: address_a,
          token_contract_address_hash: contract_address_hash,
          value: 4000
        )

      second_page =
        insert(
          :address_current_token_balance,
          address: address_b,
          token_contract_address_hash: contract_address_hash,
          value: 4000
        )

      # Both rows have the same value, so paging continues past the first
      # page's {value, address_hash} keyset key.
      paging_options = %PagingOptions{
        key: {first_page.value, first_page.address_hash},
        page_size: 2
      }

      result_paginated =
        contract_address_hash
        |> CurrentTokenBalance.token_holders_ordered_by_value(paging_options: paging_options)
        |> Repo.all()
        |> Enum.map(& &1.address_hash)

      assert result_paginated == [second_page.address_hash]
    end
  end
end
@ -0,0 +1,95 @@ |
||||
defmodule Explorer.Chain.Import.Address.CurrentTokenBalancesTest do
  use Explorer.DataCase

  alias Explorer.Chain.Import.Address.CurrentTokenBalances

  alias Explorer.Chain.{Address.CurrentTokenBalance}

  describe "insert/2" do
    setup do
      address = insert(:address, hash: "0xe8ddc5c7a2d2f0d7a9798459c0104fdf5e987aca")
      token = insert(:token)

      insert_options = %{
        timeout: :infinity,
        timestamps: %{inserted_at: DateTime.utc_now(), updated_at: DateTime.utc_now()}
      }

      %{address: address, token: token, insert_options: insert_options}
    end

    test "inserts in the current token balances", %{address: address, token: token, insert_options: insert_options} do
      changes = [
        %{
          address_hash: address.hash,
          block_number: 1,
          token_contract_address_hash: token.contract_address_hash,
          value: Decimal.new(100)
        }
      ]

      CurrentTokenBalances.insert(changes, insert_options)

      current_token_balances =
        CurrentTokenBalance
        |> Explorer.Repo.all()
        |> Enum.count()

      assert current_token_balances == 1
    end

    test "considers the last block upserting", %{address: address, token: token, insert_options: insert_options} do
      # Pre-existing row at block 1; the upsert at block 2 must win.
      insert(
        :address_current_token_balance,
        address: address,
        block_number: 1,
        token_contract_address_hash: token.contract_address_hash,
        value: 100
      )

      changes = [
        %{
          address_hash: address.hash,
          block_number: 2,
          token_contract_address_hash: token.contract_address_hash,
          value: Decimal.new(200)
        }
      ]

      CurrentTokenBalances.insert(changes, insert_options)

      current_token_balance = Explorer.Repo.get_by(CurrentTokenBalance, address_hash: address.hash)

      assert current_token_balance.block_number == 2
      assert current_token_balance.value == Decimal.new(200)
    end

    test "considers the last block when there are duplicated params", %{
      address: address,
      token: token,
      insert_options: insert_options
    } do
      # Two rows for the same {address, token} pair in one batch; only the
      # one with the highest block number should be kept.
      changes = [
        %{
          address_hash: address.hash,
          block_number: 4,
          token_contract_address_hash: token.contract_address_hash,
          value: Decimal.new(200)
        },
        %{
          address_hash: address.hash,
          block_number: 1,
          token_contract_address_hash: token.contract_address_hash,
          value: Decimal.new(100)
        }
      ]

      CurrentTokenBalances.insert(changes, insert_options)

      current_token_balance = Explorer.Repo.get_by(CurrentTokenBalance, address_hash: address.hash)

      assert current_token_balance.block_number == 4
      assert current_token_balance.value == Decimal.new(200)
    end
  end
end
@ -0,0 +1,31 @@ |
||||
defmodule Indexer.Block.Transform do
  @moduledoc """
  Protocol for transforming blocks.
  """

  @type block :: map()

  @doc """
  Transforms a block.
  """
  @callback transform(block :: block()) :: block()

  @doc """
  Runs a list of blocks through the configured block transformer.

  Raises `ArgumentError` when the `:indexer` application has no
  `:block_transformer` configured.
  """
  def transform_blocks(blocks) when is_list(blocks) do
    transformer = Application.get_env(:indexer, :block_transformer)

    unless transformer do
      # Fixed message: the original read `Set a blocker transformer."` —
      # a typo ("blocker") plus a stray trailing quote.
      raise ArgumentError,
            """
            No block transformer defined. Set a block transformer:

            config :indexer,
              block_transformer: Indexer.Block.Transform.Base
            """
    end

    Enum.map(blocks, &transformer.transform/1)
  end
end
@ -0,0 +1,14 @@ |
||||
defmodule Indexer.Block.Transform.Base do
  @moduledoc """
  Default block transformer to be used.
  """

  alias Indexer.Block.Transform

  @behaviour Transform

  # Identity transform: the block is returned unchanged.
  @impl Transform
  def transform(block) when is_map(block), do: block
end
@ -0,0 +1,16 @@ |
||||
defmodule Indexer.Block.Transform.Clique do
  @moduledoc """
  Handles block transforms for Clique chain.
  """

  alias Indexer.Block.{Transform, Util}

  @behaviour Transform

  # Replaces the block's `miner_hash` with the signer address recovered by
  # `Util.signer/1`.
  @impl Transform
  def transform(block) when is_map(block) do
    %{block | miner_hash: Util.signer(block)}
  end
end
@ -0,0 +1,75 @@ |
||||
defmodule Indexer.Block.Util do
  @moduledoc """
  Helper functions for parsing block information.
  """

  @doc """
  Calculates the signer's address by recovering the ECDSA public key from the
  signature carried in the block's `extra_data`.

  https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm
  """
  def signer(block) when is_map(block) do
    # Last 65 bytes is the signature. Multiply by two since we haven't
    # transformed to raw bytes (the data is still hex-encoded).
    {extra_data, signature} = String.split_at(trim_prefix(block.extra_data), -130)

    # The hash must be computed over the header with the signature stripped out.
    block = %{block | extra_data: extra_data}

    signature_hash = signature_hash(block)

    recover_pub_key(signature_hash, decode(signature))
  end

  # Signature hash calculated from the block header.
  # Needed for PoA-based chains
  defp signature_hash(block) do
    header_data = [
      decode(block.parent_hash),
      decode(block.sha3_uncles),
      decode(block.miner_hash),
      decode(block.state_root),
      decode(block.transactions_root),
      decode(block.receipts_root),
      decode(block.logs_bloom),
      block.difficulty,
      block.number,
      block.gas_limit,
      block.gas_used,
      DateTime.to_unix(block.timestamp),
      decode(block.extra_data),
      decode(block.mix_hash),
      decode(block.nonce)
    ]

    :keccakf1600.hash(:sha3_256, ExRLP.encode(header_data))
  end

  # Strips the "0x" prefix; raises FunctionClauseError for unprefixed input.
  defp trim_prefix("0x" <> rest), do: rest

  # Hex-decodes data, accepting an optional "0x" prefix and mixed-case digits.
  defp decode("0x" <> rest) do
    decode(rest)
  end

  defp decode(data) do
    Base.decode16!(data, case: :mixed)
  end

  # Recovers the public key from the signature hash and signature, then derives
  # the signer's address from it.
  defp recover_pub_key(signature_hash, signature) do
    <<
      r::bytes-size(32),
      s::bytes-size(32),
      v::integer-size(8)
    >> = signature

    # First byte of the recovered key marks (un)compression and can be ignored;
    # the remaining 64 bytes are the uncompressed public key. (NOTE: the
    # original code called this `private_key`, which was wrong — ECDSA recovery
    # yields the public key; a private key is never recoverable.)
    {:ok, <<_compression::bytes-size(1), public_key::binary>>} =
      :libsecp256k1.ecdsa_recover_compact(signature_hash, r <> s, :uncompressed, v)

    # The address is the last 20 bytes of the hashed public key.
    <<_::bytes-size(12), address::binary>> = :keccakf1600.hash(:sha3_256, public_key)

    "0x" <> Base.encode16(address, case: :lower)
  end
end
@ -0,0 +1,42 @@ |
||||
defmodule Indexer.Block.Transform.BaseTest do
  use ExUnit.Case

  alias Indexer.Block.Transform.Base

  describe "transform/1" do
    test "passes the block through unchanged" do
      block = fixture_block()

      assert Base.transform(block) == block
    end
  end

  # Attributes of a real block, used as the transform input.
  defp fixture_block do
    %{
      difficulty: 1,
      extra_data:
        "0xd68301080d846765746886676f312e3130856c696e7578000000000000000000773ab2ca8f47904a14739ad80a75b71d9d29b9fff8b7ecdcb73efffa6f74122f17d304b5dc8e6e5f256c9474dd115c8d4dae31b7a3d409e5c3270f8fde41cd8c00",
      gas_limit: 7_753_377,
      gas_used: 1_810_195,
      hash: "0x7004c895e812c55b0c2be8a46d72ca300a683dc27d1d7917ee7742d4d0359c1f",
      logs_bloom:
        "0x00000000000000020000000000002000000400000000000000000000000000000000000000000000040000080004000020000010000000000000000000000000000000000000000008000008000000000000000000200000000000000000000000000000020000000000000000000800000000000000804000000010080000000800000000000000000000000000000000000000000000800000000000080000000008000400000000404000000000000000000000000200000000000000000000000002000000000000001002000000000000002000000008000000000020000000000000000000000000000000000000000000000000400000800000000000",
      miner: "0x0000000000000000000000000000000000000000",
      mix_hash: "0x0000000000000000000000000000000000000000000000000000000000000000",
      nonce: "0x0000000000000000",
      number: 2_848_394,
      parent_hash: "0x20350fc367e19d3865be1ea7da72ab81f8f9941c43ac6bb24a34a0a7caa2f3df",
      receipts_root: "0x6ade4ac1079ea50cfadcce2b75ffbe4f9b14bf69b4607bbf1739463076ca6246",
      sha3_uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
      size: 6437,
      state_root: "0x23f63347851bcd109059d007d71e19c4f5e73b7f0862bebcd04458333a004d92",
      timestamp: DateTime.from_unix!(1_534_796_040),
      total_difficulty: 5_353_647,
      transactions: [
        "0x7e3bb851fc74a436826d2af6b96e4db9484431811ef0d9c9e78370488d33d4e5",
        "0x3976fd1e3d2a715c3cfcfde9bd3210798c26c017b8edb841d319227ecb3322fb",
        "0xd8db124005bb8b6fda7b71fd56ac782552a66af58fe843ba3c4930423b87d1d2",
        "0x10c1a1ca4d9f4b2bd5b89f7bbcbbc2d69e166fe23662b8db4f6beae0f50ac9fd",
        "0xaa58a6545677c796a56b8bc874174c8cfd31a6c6e6ca3a87e086d4f66d52858a"
      ],
      transactions_root: "0xde8d25c0b9b54310128a21601331094b43f910f9f96102869c2e2dca94884bf4",
      uncles: []
    }
  end
end
@ -0,0 +1,43 @@ |
||||
defmodule Indexer.Block.Transform.CliqueTest do
  use ExUnit.Case

  alias Indexer.Block.Transform.Clique

  describe "transform/1" do
    test "updates the miner hash with signer address" do
      block = fixture_block()

      expected = %{block | miner_hash: "0xfc18cbc391de84dbd87db83b20935d3e89f5dd91"}

      assert Clique.transform(block) == expected
    end
  end

  # Attributes of a real Clique block whose extra_data carries the signature.
  defp fixture_block do
    %{
      difficulty: 1,
      extra_data:
        "0xd68301080d846765746886676f312e3130856c696e7578000000000000000000773ab2ca8f47904a14739ad80a75b71d9d29b9fff8b7ecdcb73efffa6f74122f17d304b5dc8e6e5f256c9474dd115c8d4dae31b7a3d409e5c3270f8fde41cd8c00",
      gas_limit: 7_753_377,
      gas_used: 1_810_195,
      hash: "0x7004c895e812c55b0c2be8a46d72ca300a683dc27d1d7917ee7742d4d0359c1f",
      logs_bloom:
        "0x00000000000000020000000000002000000400000000000000000000000000000000000000000000040000080004000020000010000000000000000000000000000000000000000008000008000000000000000000200000000000000000000000000000020000000000000000000800000000000000804000000010080000000800000000000000000000000000000000000000000000800000000000080000000008000400000000404000000000000000000000000200000000000000000000000002000000000000001002000000000000002000000008000000000020000000000000000000000000000000000000000000000000400000800000000000",
      miner_hash: "0x0000000000000000000000000000000000000000",
      mix_hash: "0x0000000000000000000000000000000000000000000000000000000000000000",
      nonce: "0x0000000000000000",
      number: 2_848_394,
      parent_hash: "0x20350fc367e19d3865be1ea7da72ab81f8f9941c43ac6bb24a34a0a7caa2f3df",
      receipts_root: "0x6ade4ac1079ea50cfadcce2b75ffbe4f9b14bf69b4607bbf1739463076ca6246",
      sha3_uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
      size: 6437,
      state_root: "0x23f63347851bcd109059d007d71e19c4f5e73b7f0862bebcd04458333a004d92",
      timestamp: DateTime.from_unix!(1_534_796_040),
      total_difficulty: 5_353_647,
      transactions: [
        "0x7e3bb851fc74a436826d2af6b96e4db9484431811ef0d9c9e78370488d33d4e5",
        "0x3976fd1e3d2a715c3cfcfde9bd3210798c26c017b8edb841d319227ecb3322fb",
        "0xd8db124005bb8b6fda7b71fd56ac782552a66af58fe843ba3c4930423b87d1d2",
        "0x10c1a1ca4d9f4b2bd5b89f7bbcbbc2d69e166fe23662b8db4f6beae0f50ac9fd",
        "0xaa58a6545677c796a56b8bc874174c8cfd31a6c6e6ca3a87e086d4f66d52858a"
      ],
      transactions_root: "0xde8d25c0b9b54310128a21601331094b43f910f9f96102869c2e2dca94884bf4",
      uncles: []
    }
  end
end
@ -0,0 +1,56 @@ |
||||
defmodule Indexer.Block.TransformTest do
  use ExUnit.Case

  alias Indexer.Block.Transform
  alias Indexer.Block.Transform.Base

  @block %{
    difficulty: 1,
    extra_data:
      "0xd68301080d846765746886676f312e3130856c696e7578000000000000000000773ab2ca8f47904a14739ad80a75b71d9d29b9fff8b7ecdcb73efffa6f74122f17d304b5dc8e6e5f256c9474dd115c8d4dae31b7a3d409e5c3270f8fde41cd8c00",
    gas_limit: 7_753_377,
    gas_used: 1_810_195,
    hash: "0x7004c895e812c55b0c2be8a46d72ca300a683dc27d1d7917ee7742d4d0359c1f",
    logs_bloom:
      "0x00000000000000020000000000002000000400000000000000000000000000000000000000000000040000080004000020000010000000000000000000000000000000000000000008000008000000000000000000200000000000000000000000000000020000000000000000000800000000000000804000000010080000000800000000000000000000000000000000000000000000800000000000080000000008000400000000404000000000000000000000000200000000000000000000000002000000000000001002000000000000002000000008000000000020000000000000000000000000000000000000000000000000400000800000000000",
    miner_hash: "0x0000000000000000000000000000000000000000",
    mix_hash: "0x0000000000000000000000000000000000000000000000000000000000000000",
    nonce: "0x0000000000000000",
    number: 2_848_394,
    parent_hash: "0x20350fc367e19d3865be1ea7da72ab81f8f9941c43ac6bb24a34a0a7caa2f3df",
    receipts_root: "0x6ade4ac1079ea50cfadcce2b75ffbe4f9b14bf69b4607bbf1739463076ca6246",
    sha3_uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
    size: 6437,
    state_root: "0x23f63347851bcd109059d007d71e19c4f5e73b7f0862bebcd04458333a004d92",
    timestamp: DateTime.from_unix!(1_534_796_040),
    total_difficulty: 5_353_647,
    transactions: [
      "0x7e3bb851fc74a436826d2af6b96e4db9484431811ef0d9c9e78370488d33d4e5",
      "0x3976fd1e3d2a715c3cfcfde9bd3210798c26c017b8edb841d319227ecb3322fb",
      "0xd8db124005bb8b6fda7b71fd56ac782552a66af58fe843ba3c4930423b87d1d2",
      "0x10c1a1ca4d9f4b2bd5b89f7bbcbbc2d69e166fe23662b8db4f6beae0f50ac9fd",
      "0xaa58a6545677c796a56b8bc874174c8cfd31a6c6e6ca3a87e086d4f66d52858a"
    ],
    transactions_root: "0xde8d25c0b9b54310128a21601331094b43f910f9f96102869c2e2dca94884bf4",
    uncles: []
  }

  @blocks [@block, @block]

  describe "transform_blocks/1" do
    setup do
      # Save and restore the configured transformer so tests may change it.
      original = Application.get_env(:indexer, :block_transformer)

      on_exit(fn -> Application.put_env(:indexer, :block_transformer, original) end)
    end

    test "transforms a list of blocks" do
      # Pin the transformer so the expectation does not depend on ambient
      # config; `Base` passes blocks through unchanged.
      Application.put_env(:indexer, :block_transformer, Base)

      assert Transform.transform_blocks(@blocks) == @blocks
    end

    test "raises when no transformer is configured" do
      Application.put_env(:indexer, :block_transformer, nil)

      assert_raise ArgumentError, fn -> Transform.transform_blocks(@blocks) end
    end
  end
end
@ -0,0 +1,40 @@ |
||||
defmodule Indexer.Block.UtilTest do
  use ExUnit.Case

  alias Indexer.Block.Util

  test "signer/1" do
    # Attributes of a real block whose extra_data carries the signature.
    block = %{
      difficulty: 1,
      extra_data:
        "0xd68301080d846765746886676f312e3130856c696e7578000000000000000000773ab2ca8f47904a14739ad80a75b71d9d29b9fff8b7ecdcb73efffa6f74122f17d304b5dc8e6e5f256c9474dd115c8d4dae31b7a3d409e5c3270f8fde41cd8c00",
      gas_limit: 7_753_377,
      gas_used: 1_810_195,
      hash: "0x7004c895e812c55b0c2be8a46d72ca300a683dc27d1d7917ee7742d4d0359c1f",
      logs_bloom:
        "0x00000000000000020000000000002000000400000000000000000000000000000000000000000000040000080004000020000010000000000000000000000000000000000000000008000008000000000000000000200000000000000000000000000000020000000000000000000800000000000000804000000010080000000800000000000000000000000000000000000000000000800000000000080000000008000400000000404000000000000000000000000200000000000000000000000002000000000000001002000000000000002000000008000000000020000000000000000000000000000000000000000000000000400000800000000000",
      miner_hash: "0x0000000000000000000000000000000000000000",
      mix_hash: "0x0000000000000000000000000000000000000000000000000000000000000000",
      nonce: "0x0000000000000000",
      number: 2_848_394,
      parent_hash: "0x20350fc367e19d3865be1ea7da72ab81f8f9941c43ac6bb24a34a0a7caa2f3df",
      receipts_root: "0x6ade4ac1079ea50cfadcce2b75ffbe4f9b14bf69b4607bbf1739463076ca6246",
      sha3_uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
      size: 6437,
      state_root: "0x23f63347851bcd109059d007d71e19c4f5e73b7f0862bebcd04458333a004d92",
      timestamp: DateTime.from_unix!(1_534_796_040),
      total_difficulty: 5_353_647,
      transactions: [
        "0x7e3bb851fc74a436826d2af6b96e4db9484431811ef0d9c9e78370488d33d4e5",
        "0x3976fd1e3d2a715c3cfcfde9bd3210798c26c017b8edb841d319227ecb3322fb",
        "0xd8db124005bb8b6fda7b71fd56ac782552a66af58fe843ba3c4930423b87d1d2",
        "0x10c1a1ca4d9f4b2bd5b89f7bbcbbc2d69e166fe23662b8db4f6beae0f50ac9fd",
        "0xaa58a6545677c796a56b8bc874174c8cfd31a6c6e6ca3a87e086d4f66d52858a"
      ],
      transactions_root: "0xde8d25c0b9b54310128a21601331094b43f910f9f96102869c2e2dca94884bf4",
      uncles: []
    }

    assert Util.signer(block) == "0xfc18cbc391de84dbd87db83b20935d3e89f5dd91"
  end
end
Loading…
Reference in new issue