[PIE-2322] Create file if logBloom-current.cache is missing (#438)

* create file if logBloom-current.cache is missing

Signed-off-by: Karim TAAM <karim.t2am@gmail.com>
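
A minimal sketch of the idea behind this first change, using the plain java.io.File API. The helper name, class name, and placement are illustrative only and are not taken from the commit; the commit applies this inside TransactionLogBloomCacher.

    import java.io.File;
    import java.io.IOException;

    // Illustrative sketch only: make sure "logBloom-current.cache" exists in the
    // cache directory before it is read or appended to, creating an empty file
    // if it is missing. Class and method names here are hypothetical.
    final class LogBloomCacheFileCheck {

      static File ensureCurrentCacheFileExists(final File cacheDir) throws IOException {
        final File currentFile = new File(cacheDir, "logBloom-current.cache");
        if (!currentFile.exists()) {
          // createNewFile() returns false if the file already exists
          // (e.g. created concurrently), which is harmless here.
          currentFile.createNewFile();
        }
        return currentFile;
      }

      private LogBloomCacheFileCheck() {}
    }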

* update populateLatestSegment to resolve the missing-logs issue

Signed-off-by: Karim TAAM <karim.t2am@gmail.com>
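
For orientation, the arithmetic behind "the latest segment" that this method repopulates: the current segment is derived from the chain head, and the cache covers the blocks from that segment's first block up to the head. The constant is assumed to be 100_000 in this sketch; the real constant and field names live in TransactionLogBloomCacher (see the diff below).

    // Illustrative arithmetic only: which block range the "current" bloom cache
    // segment covers, given the chain head. BLOCKS_PER_BLOOM_CACHE is assumed
    // to be 100_000 for this example.
    public class SegmentBoundsExample {
      static final long BLOCKS_PER_BLOOM_CACHE = 100_000L;

      public static void main(final String[] args) {
        final long chainHead = 2_345_678L;                                 // example chain head
        final long segmentNumber = chainHead / BLOCKS_PER_BLOOM_CACHE;     // 23
        final long segmentStart = segmentNumber * BLOCKS_PER_BLOOM_CACHE;  // 2_300_000
        // The current-segment cache is (re)filled for blocks [segmentStart, chainHead].
        System.out.println("segment " + segmentNumber + ": blocks " + segmentStart + ".." + chainHead);
      }
    }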

* add lock for populateLatestSegment

Signed-off-by: Karim TAAM <karim.t2am@gmail.com>
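
The lock added here follows the standard ReentrantLock tryLock-with-timeout pattern: attempt to acquire the lock briefly, skip the work if another caller holds it, and always release it in a finally block. A self-contained sketch of that pattern (the class and doWork names are illustrative, not from the commit):

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantLock;

    // Sketch of the tryLock(timeout) pattern used to guard populateLatestSegment:
    // only one caller repopulates the current segment at a time; others give up
    // quickly instead of blocking.
    public class GuardedTask {
      private final Lock lock = new ReentrantLock();

      public boolean runExclusively() {
        try {
          if (lock.tryLock(100, TimeUnit.MILLISECONDS)) {
            try {
              doWork(); // hypothetical placeholder for the cache-population work
              return true;
            } finally {
              lock.unlock(); // always release, even if doWork() throws
            }
          }
        } catch (final InterruptedException e) {
          Thread.currentThread().interrupt(); // restore interrupt status
        }
        return false;
      }

      private void doWork() {
        // placeholder
      }
    }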
Authored by Karim T 5 years ago, committed by GitHub
parent 634404c626
commit 9c576ccdc6
1 changed file, 48 lines changed:
    ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/query/TransactionLogBloomCacher.java

@@ -56,6 +56,7 @@ public class TransactionLogBloomCacher {
   private final Map<Long, Boolean> cachedSegments;
   private final Lock submissionLock = new ReentrantLock();
+  private final Lock populateLastFragmentLock = new ReentrantLock();
   private final EthScheduler scheduler;
   private final Blockchain blockchain;
@@ -170,27 +171,36 @@ public class TransactionLogBloomCacher {
   private boolean populateLatestSegment() {
     try {
-      long blockNumber = blockchain.getChainHeadBlockNumber();
-      final File currentFile = calculateCacheFileName(CURRENT, cacheDir);
-      final long segmentNumber = blockNumber / BLOCKS_PER_BLOOM_CACHE;
-      try (final OutputStream out = new FileOutputStream(currentFile)) {
-        fillCacheFile(segmentNumber * BLOCKS_PER_BLOOM_CACHE, blockNumber, out);
-      }
-      while (blockNumber <= blockchain.getChainHeadBlockNumber()
-          && (blockNumber % BLOCKS_PER_BLOOM_CACHE != 0)) {
-        cacheSingleBlock(blockchain.getBlockHeader(blockNumber).orElseThrow(), currentFile);
-        blockNumber++;
-      }
-
-      Files.move(
-          currentFile.toPath(),
-          calculateCacheFileName(blockNumber, cacheDir).toPath(),
-          StandardCopyOption.REPLACE_EXISTING,
-          StandardCopyOption.ATOMIC_MOVE);
-      return true;
-    } catch (final IOException e) {
-      LOG.error("Unhandled caching exception.", e);
-      return false;
+      if (populateLastFragmentLock.tryLock(100, TimeUnit.MILLISECONDS)) {
+        try {
+          final File currentFile = calculateCacheFileName(CURRENT, cacheDir);
+
+          final long segmentNumber = blockchain.getChainHeadBlockNumber() / BLOCKS_PER_BLOOM_CACHE;
+          long blockNumber = segmentNumber / BLOCKS_PER_BLOOM_CACHE;
+          try (final OutputStream out = new FileOutputStream(currentFile)) {
+            fillCacheFile(segmentNumber * BLOCKS_PER_BLOOM_CACHE, blockNumber, out);
+          }
+          while (blockNumber <= blockchain.getChainHeadBlockNumber()
+              && (blockNumber % BLOCKS_PER_BLOOM_CACHE != 0)) {
+            cacheSingleBlock(blockchain.getBlockHeader(blockNumber).orElseThrow(), currentFile);
+            blockNumber++;
+          }
+
+          Files.move(
+              currentFile.toPath(),
+              calculateCacheFileName(blockNumber, cacheDir).toPath(),
+              StandardCopyOption.REPLACE_EXISTING,
+              StandardCopyOption.ATOMIC_MOVE);
+          return true;
+        } catch (final IOException e) {
+          LOG.error("Unhandled caching exception.", e);
+        } finally {
+          populateLastFragmentLock.unlock();
+        }
+      }
+    } catch (final InterruptedException e) {
+      // ignore
     }
+    return false;
   }
 
   private void ensurePreviousSegmentsArePresent(final long blockNumber) {
