Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java (revision 1540245)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java (working copy)
@@ -61,6 +61,7 @@
   private volatile FSDataInputStream stream = null;
   private volatile FSDataInputStream streamNoFsChecksum = null;
   private Object streamNoFsChecksumFirstCreateLock = new Object();
+  private Exception e = null;
 
   // The configuration states that we should validate hbase checksums
   private boolean useHBaseChecksumConfigured;
@@ -186,9 +187,14 @@
     }
   }
 
+  public Exception getException() {
+    return e;
+  }
+
   /** Close stream(s) if necessary. */
   public void close() throws IOException {
     if (!doCloseStreams) return;
+    e = new Exception("in close()");
     try {
       if (stream != streamNoFsChecksum && streamNoFsChecksum != null) {
         streamNoFsChecksum.close();
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java (revision 1540245)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java (working copy)
@@ -1314,6 +1314,9 @@
       // guaranteed to use hdfs checksum verification.
       boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
       FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);
+      if (is == null) {
+        HFile.LOG.error("NPE istream", streamWrapper.getException());
+      }
       HFileBlock blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
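
For context, the patch above is a debugging aid: FSDataInputStreamWrapper.close() stashes a throwaway Exception whose stack trace records where close() was called, and HFileBlock logs that trace when getStream() hands back a null stream. The snippet below is a minimal standalone sketch of the same capture-the-close-site technique, not part of the patch or of HBase; CloseTracingStream and its members are illustrative names only.

// Standalone sketch of the debugging idea used in the patch: record an
// Exception at close() time so a later failure can print who closed the stream.
public class CloseTracingStream {
  private Object stream = new Object();  // stands in for the wrapped FSDataInputStream
  private Exception closeSite = null;    // captures the stack trace of the close() caller

  public void close() {
    // The Exception is never thrown; it only records the current stack trace.
    closeSite = new Exception("in close()");
    stream = null;
  }

  public Object getStream() {
    if (stream == null && closeSite != null) {
      // The patch does HFile.LOG.error("NPE istream", wrapper.getException());
      // here we simply print the captured close-site stack trace.
      closeSite.printStackTrace();
    }
    return stream;
  }

  public static void main(String[] args) {
    CloseTracingStream s = new CloseTracingStream();
    s.close();
    s.getStream();  // prints the stack trace showing where close() was called
  }
}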