 .../apache/hadoop/hbase/io/hfile/ChecksumUtil.java |  5 ++---
 .../apache/hadoop/hbase/io/hfile/HFileBlock.java   | 21 +++++++++++----------
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index 0e03a42..b9f84e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -23,7 +23,6 @@ import java.nio.ByteBuffer;
 import java.util.zip.Checksum;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -98,7 +97,7 @@ public class ChecksumUtil {
    * The header is extracted from the specified HFileBlock while the
    * data-to-be-verified is extracted from 'data'.
    */
-  static boolean validateBlockChecksum(Path path, HFileBlock block,
+  static boolean validateBlockChecksum(String pathName, HFileBlock block,
     byte[] data, int hdrSize) throws IOException {
 
     // If this is an older version of the block that does not have
@@ -154,7 +153,7 @@
 
       int storedChecksum = Bytes.toInt(data, cksumOffset);
       if (storedChecksum != (int)checksumObject.getValue()) {
-        String msg = "File " + path +
+        String msg = "File " + pathName +
                      " Stored checksum value of " + storedChecksum +
                      " at offset " + cksumOffset +
                      " does not match computed checksum " +
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 0a95888..f048c6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1315,21 +1315,22 @@ public class HFileBlock implements Cacheable {
     /** The filesystem used to access data */
     protected HFileSystem hfs;
 
-    /** The path (if any) where this data is coming from */
-    protected Path path;
-
     private final Lock streamLock = new ReentrantLock();
 
     /** The default buffer size for our buffered streams */
     public static final int DEFAULT_BUFFER_SIZE = 1 << 20;
 
     protected HFileContext fileContext;
+    // Cache the fileName
+    protected String pathName;
 
     public AbstractFSReader(long fileSize, HFileSystem hfs, Path path,
         HFileContext fileContext) throws IOException {
       this.fileSize = fileSize;
       this.hfs = hfs;
-      this.path = path;
+      if (path != null) {
+        this.pathName = path.toString();
+      }
       this.fileContext = fileContext;
       this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
     }
@@ -1507,13 +1508,13 @@
             doVerificationThruHBaseChecksum);
         if (blk == null) {
           HFile.LOG.warn("HBase checksum verification failed for file " +
-                         path + " at offset " +
+                         pathName + " at offset " +
                          offset + " filesize " + fileSize +
                          ". Retrying read with HDFS checksums turned on...");
 
           if (!doVerificationThruHBaseChecksum) {
             String msg = "HBase checksum verification failed for file " +
-                         path + " at offset " +
+                         pathName + " at offset " +
                          offset + " filesize " + fileSize +
                          " but this cannot happen because doVerify is " +
                          doVerificationThruHBaseChecksum;
@@ -1535,13 +1536,13 @@
               doVerificationThruHBaseChecksum);
           if (blk != null) {
             HFile.LOG.warn("HDFS checksum verification suceeded for file " +
-                           path + " at offset " +
+                           pathName + " at offset " +
                            offset + " filesize " + fileSize);
           }
         }
         if (blk == null && !doVerificationThruHBaseChecksum) {
           String msg = "readBlockData failed, possibly due to " +
-                       "checksum verification failed for file " + path +
+                       "checksum verification failed for file " + pathName +
                        " at offset " + offset + " filesize " + fileSize;
           HFile.LOG.warn(msg);
           throw new IOException(msg);
@@ -1743,7 +1744,7 @@
      */
     protected boolean validateBlockChecksum(HFileBlock block,
         byte[] data, int hdrSize) throws IOException {
-      return ChecksumUtil.validateBlockChecksum(path, block, data, hdrSize);
+      return ChecksumUtil.validateBlockChecksum(pathName, block, data, hdrSize);
     }
 
     @Override
@@ -1753,7 +1754,7 @@
 
     @Override
     public String toString() {
-      return "hfs=" + hfs + ", path=" + path + ", fileContext=" + fileContext;
+      return "hfs=" + hfs + ", path=" + pathName + ", fileContext=" + fileContext;
     }
   }
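In short, the patch stops AbstractFSReader from holding a Path field and instead caches path.toString() once as pathName in the constructor, then threads that String through ChecksumUtil.validateBlockChecksum and the warn/exception messages (which also lets ChecksumUtil drop its org.apache.hadoop.fs.Path import). Below is a minimal sketch of the same pattern outside HBase; the class and method names (Reader, describeFailure) are illustrative only, and the only assumption is org.apache.hadoop.fs.Path from hadoop-common on the classpath.

import org.apache.hadoop.fs.Path;

public class PathNameCachingExample {

  // Illustrative stand-in for a reader that only ever needs the path for messages.
  static class Reader {
    // Cached result of Path#toString(); stays null when no path was supplied.
    private final String pathName;

    Reader(Path path) {
      // Convert once at construction time; later log/exception strings reuse it.
      this.pathName = (path != null) ? path.toString() : null;
    }

    String describeFailure(long offset, long fileSize) {
      // Mirrors the shape of the warn/exception messages touched by the patch.
      return "checksum verification failed for file " + pathName +
          " at offset " + offset + " filesize " + fileSize;
    }
  }

  public static void main(String[] args) {
    Reader reader = new Reader(new Path("/hbase/data/default/t1/r1/cf/hfile1"));
    System.out.println(reader.describeFailure(8192L, 1048576L));
  }
}

The design point is simply that the Path object was only ever used to build strings on error and logging paths, so caching the String once avoids repeated Path#toString() calls per block read and lets the reader drop the Path reference entirely.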