From 9c576ccdc69be5bd6604e27696987aea818f61b8 Mon Sep 17 00:00:00 2001 From: Karim T Date: Tue, 3 Mar 2020 18:08:44 +0100 Subject: [PATCH] [PIE-2322] Create file if logBloom-current.cache is missing (#438) * create file if logBloom-current.cache is missing Signed-off-by: Karim TAAM * update populateLatestSegment in order to resolve the missing logs issue Signed-off-by: Karim TAAM * add lock for populateLatestSegment Signed-off-by: Karim TAAM --- .../api/query/TransactionLogBloomCacher.java | 48 +++++++++++-------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/query/TransactionLogBloomCacher.java b/ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/query/TransactionLogBloomCacher.java index bafbf05fdf..94317544c7 100644 --- a/ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/query/TransactionLogBloomCacher.java +++ b/ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/query/TransactionLogBloomCacher.java @@ -56,6 +56,7 @@ public class TransactionLogBloomCacher { private final Map cachedSegments; private final Lock submissionLock = new ReentrantLock(); + private final Lock populateLastFragmentLock = new ReentrantLock(); private final EthScheduler scheduler; private final Blockchain blockchain; @@ -170,27 +171,36 @@ public class TransactionLogBloomCacher { private boolean populateLatestSegment() { try { - long blockNumber = blockchain.getChainHeadBlockNumber(); - final File currentFile = calculateCacheFileName(CURRENT, cacheDir); - final long segmentNumber = blockNumber / BLOCKS_PER_BLOOM_CACHE; - try (final OutputStream out = new FileOutputStream(currentFile)) { - fillCacheFile(segmentNumber * BLOCKS_PER_BLOOM_CACHE, blockNumber, out); - } - while (blockNumber <= blockchain.getChainHeadBlockNumber() - && (blockNumber % BLOCKS_PER_BLOOM_CACHE != 0)) { - cacheSingleBlock(blockchain.getBlockHeader(blockNumber).orElseThrow(), currentFile); - blockNumber++; + if 
(populateLastFragmentLock.tryLock(100, TimeUnit.MILLISECONDS)) { + try { + final File currentFile = calculateCacheFileName(CURRENT, cacheDir); + + final long segmentNumber = blockchain.getChainHeadBlockNumber() / BLOCKS_PER_BLOOM_CACHE; + long blockNumber = segmentNumber * BLOCKS_PER_BLOOM_CACHE; + try (final OutputStream out = new FileOutputStream(currentFile)) { + fillCacheFile(segmentNumber * BLOCKS_PER_BLOOM_CACHE, blockNumber, out); + } + while (blockNumber <= blockchain.getChainHeadBlockNumber() + && (blockNumber % BLOCKS_PER_BLOOM_CACHE != 0)) { + cacheSingleBlock(blockchain.getBlockHeader(blockNumber).orElseThrow(), currentFile); + blockNumber++; + } + Files.move( + currentFile.toPath(), + calculateCacheFileName(blockNumber, cacheDir).toPath(), + StandardCopyOption.REPLACE_EXISTING, + StandardCopyOption.ATOMIC_MOVE); + return true; + } catch (final IOException e) { + LOG.error("Unhandled caching exception.", e); + } finally { + populateLastFragmentLock.unlock(); + } } - Files.move( - currentFile.toPath(), - calculateCacheFileName(blockNumber, cacheDir).toPath(), - StandardCopyOption.REPLACE_EXISTING, - StandardCopyOption.ATOMIC_MOVE); - return true; - } catch (final IOException e) { - LOG.error("Unhandled caching exception.", e); - return false; + } catch (final InterruptedException e) { + // lock acquisition was interrupted; give up, the next scheduled cache run will retry } + return false; } private void ensurePreviousSegmentsArePresent(final long blockNumber) {