Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java	(revision 1539194)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java	(working copy)
@@ -24,6 +24,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FileLink;
+import org.apache.hadoop.hbase.util.Pair;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -147,11 +148,12 @@
 
   /**
    * Get the stream to use. Thread-safe.
-   * @param useHBaseChecksum must be the value that shouldUseHBaseChecksum has returned
-   * at some point in the past, otherwise the result is undefined.
+   * @return a pair of the stream to use and whether HBase checksum verification applies to it.
    */
-  public FSDataInputStream getStream(boolean useHBaseChecksum) {
-    return useHBaseChecksum ? this.streamNoFsChecksum : this.stream;
+  public Pair<FSDataInputStream, Boolean> getStream() {
+    boolean checksum = this.useHBaseChecksum; // single read keeps stream and flag consistent
+    return new Pair<FSDataInputStream, Boolean>(
+        checksum ? this.streamNoFsChecksum : this.stream, checksum);
   }
 
   /**
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java	(revision 1539194)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java	(working copy)
@@ -47,6 +47,7 @@
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.CompoundBloomFilter;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.IOUtils;
 
 import com.google.common.base.Preconditions;
@@ -1312,8 +1313,9 @@
       // thread-safe but the one constaint is that if we decide
       // to skip hbase checksum verification then we are
       // guaranteed to use hdfs checksum verification.
-      boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
-      FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);
+      Pair<FSDataInputStream, Boolean> pair = streamWrapper.getStream();
+      boolean doVerificationThruHBaseChecksum = pair.getSecond();
+      FSDataInputStream is = pair.getFirst();
 
       HFileBlock blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(revision 1539194)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java	(working copy)
@@ -65,6 +65,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.Writable;
 
 import com.google.protobuf.ZeroCopyLiteralByteString;
@@ -520,9 +521,10 @@
       HFileSystem hfs) throws IOException {
     FixedFileTrailer trailer = null;
     try {
-      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();
+      Pair<FSDataInputStream, Boolean> pair = fsdis.getStream();
+      boolean isHBaseChecksum = pair.getSecond();
       assert !isHBaseChecksum; // Initially we must read with FS checksum.
-      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);
+      trailer = FixedFileTrailer.readFromStream(pair.getFirst(), size);
       switch (trailer.getMajorVersion()) {
       case 2:
         return new HFileReaderV2(