From 6ee4ea90b66c9a78aacd1a5830088fac18f86d1c Mon Sep 17 00:00:00 2001
From: zhangduo
Date: Fri, 20 Mar 2015 22:16:53 +0800
Subject: [PATCH] HBASE-13301 Possible memory leak in BucketCache

---
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java  | 23 +++++++++-----
 .../hadoop/hbase/io/hfile/CacheTestUtils.java      |  2 +-
 .../hbase/io/hfile/bucket/TestBucketCache.java     | 36 ++++++++++++++++++++--
 3 files changed, 50 insertions(+), 11 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 7dda0e6..3ce8092 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -115,7 +115,8 @@ public class BucketCache implements BlockCache, HeapSize {
   @VisibleForTesting
   Map<BlockCacheKey, RAMQueueEntry> ramCache;
   // In this map, store the block's meta data like offset, length
-  private Map<BlockCacheKey, BucketEntry> backingMap;
+  @VisibleForTesting
+  Map<BlockCacheKey, BucketEntry> backingMap;
 
   /**
    * Flag if the cache is enabled or not... We shut it off if there are IO
@@ -182,7 +183,8 @@ public class BucketCache implements BlockCache, HeapSize {
    *
    * TODO:We could extend the IdLock to IdReadWriteLock for better.
    */
-  private IdLock offsetLock = new IdLock();
+  @VisibleForTesting
+  IdLock offsetLock = new IdLock();
 
   private final ConcurrentIndex<String, BlockCacheKey> blocksByHFile =
       new ConcurrentIndex<String, BlockCacheKey>(new Comparator<BlockCacheKey>() {
@@ -442,6 +444,16 @@ public class BucketCache implements BlockCache, HeapSize {
     return null;
   }
 
+  @VisibleForTesting
+  void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry, boolean decrementBlockNumber) {
+    bucketAllocator.freeBlock(bucketEntry.offset());
+    realCacheSize.addAndGet(-1 * bucketEntry.getLength());
+    blocksByHFile.remove(cacheKey.getHfileName(), cacheKey);
+    if (decrementBlockNumber) {
+      this.blockNumber.decrementAndGet();
+    }
+  }
+
   @Override
   public boolean evictBlock(BlockCacheKey cacheKey) {
     if (!cacheEnabled) return false;
@@ -463,12 +475,7 @@ public class BucketCache implements BlockCache, HeapSize {
     try {
       lockEntry = offsetLock.getLockEntry(bucketEntry.offset());
       if (bucketEntry.equals(backingMap.remove(cacheKey))) {
-        bucketAllocator.freeBlock(bucketEntry.offset());
-        realCacheSize.addAndGet(-1 * bucketEntry.getLength());
-        blocksByHFile.remove(cacheKey.getHfileName(), cacheKey);
-        if (removedBlock == null) {
-          this.blockNumber.decrementAndGet();
-        }
+        blockEvicted(cacheKey, bucketEntry, removedBlock == null);
       } else {
         return false;
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
index 5ef8cf0..8118b52 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
@@ -247,7 +247,7 @@ public class CacheTestUtils {
     assertTrue(toBeTested.getStats().getEvictedCount() > 0);
   }
 
-  private static class ByteArrayCacheable implements Cacheable {
+  public static class ByteArrayCacheable implements Cacheable {
 
     static final CacheableDeserializer<Cacheable> blockDeserializer =
         new CacheableDeserializer<Cacheable>() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index d29be01..2642f01 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.*;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -29,6 +29,7 @@ import java.util.Random;
 
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.IdLock;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
 import org.apache.hadoop.hbase.io.hfile.Cacheable;
@@ -182,4 +183,35 @@ public class TestBucketCache {
     cache.stopWriterThreads();
     CacheTestUtils.testHeapSizeChanges(cache, BLOCK_SIZE);
   }
-}
\ No newline at end of file
+
+  @Test
+  public void testMemoryLeak() throws Exception {
+    long offset = 1L;
+    final BlockCacheKey cacheKey = new BlockCacheKey("dummy", offset);
+    cache.cacheBlock(cacheKey, new CacheTestUtils.ByteArrayCacheable(new byte[10]));
+    while (!cache.backingMap.containsKey(cacheKey)) {
+      Thread.sleep(100);
+    }
+    IdLock.Entry lockEntry = cache.offsetLock.getLockEntry(cache.backingMap.get(cacheKey).offset());
+    Thread evictThread = new Thread("evict-block") {
+
+      @Override
+      public void run() {
+        cache.evictBlock(cacheKey);
+      }
+
+    };
+    evictThread.start();
+    Thread.sleep(2000);
+    cache.blockEvicted(cacheKey, cache.backingMap.remove(cacheKey), true);
+    cache.cacheBlock(cacheKey, new CacheTestUtils.ByteArrayCacheable(new byte[10]));
+    while (!cache.backingMap.containsKey(cacheKey)) {
+      Thread.sleep(100);
+    }
+    cache.offsetLock.releaseLockEntry(lockEntry);
+    evictThread.join();
+    assertEquals(1L, cache.getBlockCount());
+    assertTrue(cache.getCurrentSize() > 0L);
+    assertTrue("We should have a block!", cache.iterator().hasNext());
+  }
+}
-- 
1.9.1
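
Reviewer note (illustrative, not part of the patch): the diff frees a bucket only while
holding the per-offset IdLock in evictBlock(), and the new test reaches that lock through
the now package-private offsetLock and backingMap fields plus the new blockEvicted() helper.
Below is a minimal sketch of the lock-per-offset pattern the patch relies on, assuming only
the IdLock API visible in the diff (getLockEntry/releaseLockEntry); the class and method
names are hypothetical:

  import java.io.IOException;

  import org.apache.hadoop.hbase.util.IdLock;

  public class OffsetLockSketch {
    private final IdLock offsetLock = new IdLock();

    // Free a block's bucket only while holding the lock for its offset, mirroring
    // the try/finally shape of BucketCache.evictBlock() in this patch.
    void freeWithLock(long offset) throws IOException {
      IdLock.Entry lockEntry = offsetLock.getLockEntry(offset);
      try {
        // verify the backingMap entry is still the one we resolved, then free it
      } finally {
        offsetLock.releaseLockEntry(lockEntry);
      }
    }
  }

As the TODO in the diff suggests, replacing IdLock with an IdReadWriteLock would let
concurrent readers of the same offset proceed while still excluding a concurrent free.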