diff --git src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 3812e9c..3a5673e 100644
--- src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -193,11 +193,11 @@ public class LruBlockCache implements BlockCache, HeapSize {
   public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
     this(maxSize, blockSize, evictionThread,
         (int)Math.ceil(1.2*maxSize/blockSize),
-        DEFAULT_LOAD_FACTOR, 
+        DEFAULT_LOAD_FACTOR,
         DEFAULT_CONCURRENCY_LEVEL,
-        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR), 
-        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR), 
-        DEFAULT_SINGLE_FACTOR, 
+        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
+        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
+        DEFAULT_SINGLE_FACTOR,
         DEFAULT_MULTI_FACTOR,
         DEFAULT_MEMORY_FACTOR);
   }
@@ -276,7 +276,9 @@ public class LruBlockCache implements BlockCache, HeapSize {
   public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
     CachedBlock cb = map.get(cacheKey);
     if(cb != null) {
-      throw new RuntimeException("Cached an already cached block");
+      String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
+      LOG.warn(msg);
+      assert false : msg;
     }
     cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
     long newSize = updateSizeMetrics(cb, false);
diff --git src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index 3a581ef..7d89531 100644
--- src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -27,7 +27,6 @@ import java.util.Collection;
 import java.util.Map;
 import java.util.Random;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -149,8 +148,9 @@ public class TestLruBlockCache {
       try {
         cache.cacheBlock(block.cacheKey, block);
         assertTrue("Cache should not allow re-caching a block", false);
-      } catch(RuntimeException re) {
+      } catch(AssertionError re) {
         // expected
+        assertTrue(re.getMessage().contains("Cached an already cached block"));
       }
     }
 
diff --git src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
index 478bfbd..4038974 100644
--- src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
+++ src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import static org.junit.Assert.assertTrue;
+
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.Callable;
@@ -28,12 +30,10 @@ import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import static org.junit.Assert.*;
-
 import org.apache.hadoop.hbase.MediumTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -106,6 +106,7 @@ public class TestIdLock {
       idLock.assertMapEmpty();
     } finally {
       exec.shutdown();
+      exec.awaitTermination(5000, TimeUnit.MILLISECONDS);
     }
   }
 
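Note, not part of the patch: the LruBlockCache hunk above swaps a hard RuntimeException on double-caching for a WARN log plus a Java assert, so test JVMs launched with -ea fail fast with AssertionError (which the updated TestLruBlockCache now expects), while JVMs without assertions enabled only log and fall through. The following standalone sketch illustrates that behavior under stated assumptions: the WarnAssertDemo class is hypothetical, and System.err stands in for commons-logging's LOG.warn; it is not HBase code.

// Minimal sketch of the warn-plus-assert pattern used in the patch.
import java.util.HashMap;
import java.util.Map;

public class WarnAssertDemo {
  private final Map<String, String> cache = new HashMap<String, String>();

  void cacheBlock(String key, String value) {
    if (cache.containsKey(key)) {
      String msg = "Cached an already cached block: " + key;
      System.err.println("WARN: " + msg); // stands in for LOG.warn(msg)
      assert false : msg;                 // throws AssertionError only when run with -ea
    }
    cache.put(key, value);                // without -ea, execution falls through and overwrites
  }

  public static void main(String[] args) {
    WarnAssertDemo demo = new WarnAssertDemo();
    demo.cacheBlock("block-1", "data");
    demo.cacheBlock("block-1", "data");   // "java -ea WarnAssertDemo" -> AssertionError; plain "java" -> WARN only
  }
}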