diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java index d862b36..22aad80 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java @@ -39,9 +39,15 @@ public enum ChecksumType { @Override public DataChecksum.Type getDataChecksumType() { return DataChecksum.Type.NULL; } + + @Override + public DataChecksum getChecksumObject(int bytesPerChecksum) { + return null; + } }, CRC32((byte)1) { + @Override public String getName() { return "CRC32"; @@ -50,9 +56,22 @@ public enum ChecksumType { @Override public DataChecksum.Type getDataChecksumType() { return DataChecksum.Type.CRC32; } + + @Override + public DataChecksum getChecksumObject(int bytesPerChecksum) { + DataChecksum checksum = CurCRC32DataChecksum.get(); + if (checksum == null + || (checksum.getBytesPerChecksum() != bytesPerChecksum)) { + checksum = DataChecksum.newDataChecksum(getDataChecksumType(), + bytesPerChecksum); + CurCRC32DataChecksum.set(checksum); + } + return checksum; + } }, CRC32C((byte)2) { + @Override public String getName() { return "CRC32C"; @@ -61,8 +80,24 @@ public enum ChecksumType { @Override public DataChecksum.Type getDataChecksumType() { return DataChecksum.Type.CRC32C; } + + @Override + public DataChecksum getChecksumObject(int bytesPerChecksum) { + DataChecksum checksum = CurCRC32CDataChecksum.get(); + if (checksum == null + || (checksum.getBytesPerChecksum() != bytesPerChecksum)) { + checksum = DataChecksum.newDataChecksum(getDataChecksumType(), + bytesPerChecksum); + CurCRC32CDataChecksum.set(checksum); + } + return checksum; + } }; + private static final ThreadLocal<DataChecksum> CurCRC32DataChecksum = new ThreadLocal<DataChecksum>(); + + private static final ThreadLocal<DataChecksum> CurCRC32CDataChecksum = new ThreadLocal<DataChecksum>(); + private final byte code; public static ChecksumType getDefaultChecksumType() { @@ 
-75,6 +110,9 @@ public enum ChecksumType { /** Function to get corresponding {@link org.apache.hadoop.util.DataChecksum.Type}. */ public abstract DataChecksum.Type getDataChecksumType(); + /** Returns an object that can be used to generate/validate checksums. */ + public abstract DataChecksum getChecksumObject(int bytesPerChecksum); + private ChecksumType(final byte c) { this.code = c; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java index a47cc12..7263c66 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java @@ -69,8 +69,7 @@ public class ChecksumUtil { return; // No checksum for this block. } - DataChecksum checksum = DataChecksum.newDataChecksum( - checksumType.getDataChecksumType(), bytesPerChecksum); + DataChecksum checksum = checksumType.getChecksumObject(bytesPerChecksum); checksum.calculateChunkedSums( ByteBuffer.wrap(indata, startOffset, endOffset - startOffset), @@ -99,8 +98,8 @@ public class ChecksumUtil { // read in the stored value of the checksum size from the header. int bytesPerChecksum = buffer.getInt(HFileBlock.Header.BYTES_PER_CHECKSUM_INDEX); - DataChecksum dataChecksum = DataChecksum.newDataChecksum( - cktype.getDataChecksumType(), bytesPerChecksum); + DataChecksum dataChecksum = cktype.getChecksumObject(bytesPerChecksum); + assert dataChecksum != null; int onDiskDataSizeWithHeader = buffer.getInt(HFileBlock.Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);