Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java	(revision 1183219)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java	(working copy)
@@ -101,7 +101,7 @@
       LOG.info("Got expected exception", ioe);
       assertTrue(ioe.getMessage().contains("Fault"));
     }
-    reader.close();
+    reader.close(true); // end of test so evictOnClose
   }
 
   /**
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java	(revision 1183219)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java	(working copy)
@@ -268,7 +268,7 @@
       }
     }
 
-    r.close();
+    r.close(true); // end of test so evictOnClose
   }
 
   private boolean isInBloom(StoreFileScanner scanner, byte[] row, BloomType bt,
Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java	(revision 1183219)
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java	(working copy)
@@ -321,10 +321,10 @@
       assertTrue(count == 0);
     } finally {
       if (top != null) {
-        top.close();
+        top.close(true); // evict since we are about to delete the file
       }
       if (bottom != null) {
-        bottom.close();
+        bottom.close(true); // evict since we are about to delete the file
       }
       fs.delete(f.getPath(), true);
     }
@@ -370,7 +370,7 @@
         if (exists) falsePos++;
       }
     }
-    reader.close();
+    reader.close(true); // evict because we are about to delete the file
     fs.delete(f, true);
     assertEquals("False negatives: " + falseNeg, 0, falseNeg);
     int maxFalsePos = (int) (2 * 2000 * err);
@@ -466,7 +466,7 @@
         }
       }
     }
-    reader.close();
+    reader.close(true); // evict because we are about to delete the file
     fs.delete(f, true);
     System.out.println(bt[x].toString());
     System.out.println("  False negatives: " + falseNeg);
@@ -677,7 +677,7 @@
     assertEquals(startEvicted, cs.getEvictedCount());
     startMiss += 3;
     scanner.close();
-    reader.close();
+    reader.close(cacheConf.shouldEvictOnClose());
 
     // Now write a StoreFile with three blocks, with cache on write on
     conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
@@ -697,7 +697,7 @@
     assertEquals(startEvicted, cs.getEvictedCount());
     startHit += 3;
     scanner.close();
-    reader.close();
+    reader.close(cacheConf.shouldEvictOnClose());
 
     // Let's read back the two files to ensure the blocks exactly match
     hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
@@ -730,9 +730,9 @@
     assertEquals(startEvicted, cs.getEvictedCount());
     startHit += 6;
     scannerOne.close();
-    readerOne.close();
+    readerOne.close(cacheConf.shouldEvictOnClose());
     scannerTwo.close();
-    readerTwo.close();
+    readerTwo.close(cacheConf.shouldEvictOnClose());
 
     // Let's close the first file with evict on close turned on
     conf.setBoolean("hbase.rs.evictblocksonclose", true);
@@ -740,7 +740,7 @@
     hsf = new StoreFile(this.fs, pathCowOff, conf, cacheConf,
         StoreFile.BloomType.NONE);
     reader = hsf.createReader();
-    reader.close();
+    reader.close(cacheConf.shouldEvictOnClose());
 
     // We should have 3 new evictions
     assertEquals(startHit, cs.getHitCount());
@@ -754,7 +754,7 @@
     hsf = new StoreFile(this.fs, pathCowOn, conf, cacheConf,
         StoreFile.BloomType.NONE);
     reader = hsf.createReader();
-    reader.close();
+    reader.close(cacheConf.shouldEvictOnClose());
 
     // We expect no changes
     assertEquals(startHit, cs.getHitCount());
Index: src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/Store.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/Store.java	(working copy)
@@ -421,7 +421,7 @@
       storefiles = ImmutableList.of();
 
       for (StoreFile f: result) {
-        f.closeReader();
+        f.closeReader(true);
       }
       LOG.debug("closed " + this.storeNameStr);
       return result;
@@ -1215,7 +1215,7 @@
       throw e;
     } finally {
       if (storeFile != null) {
-        storeFile.closeReader();
+        storeFile.closeReader(false);
       }
     }
   }
Index: src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java	(working copy)
@@ -526,11 +526,13 @@
   }
 
   /**
+   * @param evictOnClose whether to evict blocks belonging to this file
    * @throws IOException
    */
-  public synchronized void closeReader() throws IOException {
+  public synchronized void closeReader(boolean evictOnClose)
+      throws IOException {
     if (this.reader != null) {
-      this.reader.close();
+      this.reader.close(evictOnClose);
       this.reader = null;
     }
   }
@@ -540,7 +542,7 @@
    * @throws IOException
    */
   public void deleteReader() throws IOException {
-    closeReader();
+    closeReader(true);
     this.fs.delete(getPath(), true);
   }
 
@@ -1011,8 +1013,8 @@
       return reader.getScanner(cacheBlocks, pread);
     }
 
-    public void close() throws IOException {
-      reader.close();
+    public void close(boolean evictOnClose) throws IOException {
+      reader.close(evictOnClose);
     }
 
     public boolean shouldSeek(Scan scan, final SortedSet<byte[]> columns) {
Index: src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java	(working copy)
@@ -409,7 +409,7 @@
       }
     } finally {
       if (halfWriter != null) halfWriter.close();
-      if (halfReader != null) halfReader.close();
+      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
     }
   }
 
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java	(working copy)
@@ -348,7 +348,12 @@
 
   @Override
   public void close() throws IOException {
-    if (cacheConf.shouldEvictOnClose()) {
+    close(cacheConf.shouldEvictOnClose());
+  }
+
+  @Override
+  public void close(boolean evictOnClose) throws IOException {
+    if (evictOnClose) {
       int numEvicted = 0;
       for (int i = 0; i < dataBlockIndexReader.getRootBlockCount(); i++) {
         if (cacheConf.getBlockCache().evictBlock(HFile.getBlockCacheKey(name,
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java	(working copy)
@@ -293,7 +293,11 @@
 
   @Override
   public void close() throws IOException {
-    if (cacheConf.shouldEvictOnClose()) {
+    close(cacheConf.shouldEvictOnClose());
+  }
+
+  public void close(boolean evictOnClose) throws IOException {
+    if (evictOnClose) {
       int numEvicted = cacheConf.getBlockCache().evictBlocksByPrefix(name
           + HFile.CACHE_KEY_SEPARATOR);
       LOG.debug("On close of file " + name + " evicted " + numEvicted
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(working copy)
@@ -322,6 +322,9 @@
     DataInput getBloomFilterMetadata() throws IOException;
 
     Path getPath();
+
+    /** Close the file, optionally evicting its blocks from the block cache. */
+    void close(boolean evictOnClose) throws IOException;
   }
 
   private static Reader pickReaderVersion(Path path, FSDataInputStream fsdis,
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java	(revision 1183219)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java	(working copy)
@@ -26,6 +26,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -42,6 +44,7 @@
  * Writes HFile format version 2.
  */
 public class HFileWriterV2 extends AbstractHFileWriter {
+  static final Log LOG = LogFactory.getLog(HFileWriterV2.class);
 
   /** Inline block writers for multi-level block index and compound Blooms. */
   private List<InlineBlockWriter> inlineBlockWriters =
@@ -174,6 +177,8 @@
 
     // Meta data block index writer
     metaBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter();
+
+    LOG.debug("HFileWriter initialized with " + cacheConf);
   }
 
   /**
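
Reviewer note (not part of the patch): a minimal sketch of how a caller is expected to choose the evictOnClose argument after this change. The helper method and its parameters are hypothetical; only StoreFile.createReader(), Reader.close(boolean), CacheConfig.shouldEvictOnClose(), and the "hbase.rs.evictblocksonclose" setting come from the patch and the tests above.

  // Hypothetical helper, illustrative only.
  void readThenClose(StoreFile storeFile, CacheConfig cacheConf,
      boolean deletingAfterwards) throws IOException {
    StoreFile.Reader reader = storeFile.createReader();
    try {
      // ... read from the file ...
    } finally {
      // Evict unconditionally when the file is about to be deleted
      // (as deleteReader() and the updated tests do); otherwise honor
      // the configured "hbase.rs.evictblocksonclose" policy exposed
      // through CacheConfig.
      reader.close(deletingAfterwards || cacheConf.shouldEvictOnClose());
    }
  }

This mirrors the two call sites in Store.java: close() evicts (true) because the store is shutting down, while the compaction verification path passes false since the file is about to be opened again.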