From 1e7f2416fd987c52a28369b91184980c980036eb Mon Sep 17 00:00:00 2001 From: Andrew Purtell Date: Sun, 1 Nov 2015 12:24:35 -0800 Subject: [PATCH] HBASE-14738 Backport HBASE-11927 (Use Native Hadoop Library for HFile checksum) --- .../apache/hadoop/hbase/io/hfile/HFileContext.java | 3 +- .../hadoop/hbase/io/hfile/HFileContextBuilder.java | 3 +- .../org/apache/hadoop/hbase/util/Checksum.java | 39 ++++ .../apache/hadoop/hbase/util/ChecksumFactory.java | 100 ---------- .../org/apache/hadoop/hbase/util/ChecksumType.java | 90 +-------- hbase-common/src/main/resources/hbase-default.xml | 3 +- hbase-hadoop-compat/pom.xml | 5 + .../apache/hadoop/hbase/util/ChecksumFactory.java | 94 +++++++++ .../org/apache/hadoop/hbase/util/ChecksumImpl.java | 218 +++++++++++++++++++++ .../services/org.apache.hadoop.hbase.util.Checksum | 17 ++ .../org/apache/hadoop/hbase/util/ChecksumImpl.java | 65 ++++++ .../services/org.apache.hadoop.hbase.util.Checksum | 17 ++ .../apache/hadoop/hbase/io/hfile/ChecksumUtil.java | 183 ++++++++++------- .../org/apache/hadoop/hbase/io/hfile/HFile.java | 2 - .../apache/hadoop/hbase/regionserver/HStore.java | 2 +- .../apache/hadoop/hbase/io/hfile/TestChecksum.java | 89 +++++++-- .../hadoop/hbase/io/hfile/TestHFileBlock.java | 7 +- .../hadoop/hbase/io/hfile/TestHFileBlockIndex.java | 1 - 18 files changed, 647 insertions(+), 291 deletions(-) create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/util/Checksum.java delete mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java create mode 100644 hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java create mode 100644 hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java create mode 100644 hbase-hadoop1-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum create mode 100644 hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java create mode 100644 hbase-hadoop2-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java index 83fe701..318e531 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.util.ClassSize; public class HFileContext implements HeapSize, Cloneable { public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024; - public static final ChecksumType DEFAULT_CHECKSUM_TYPE = ChecksumType.CRC32; /** Whether checksum is enabled or not**/ private boolean usesHBaseChecksum = true; @@ -48,7 +47,7 @@ public class HFileContext implements HeapSize, Cloneable { /** Whether tags to be compressed or not**/ private boolean compressTags; /** the checksum type **/ - private ChecksumType checksumType = DEFAULT_CHECKSUM_TYPE; + private ChecksumType checksumType = ChecksumType.getDefaultChecksumType(); /** the number of bytes per checksum value **/ private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. 
*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java index 9a4234a..e3e6c77 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.util.ChecksumType; public class HFileContextBuilder { public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024; - public static final ChecksumType DEFAULT_CHECKSUM_TYPE = ChecksumType.CRC32; /** Whether checksum is enabled or not **/ private boolean usesHBaseChecksum = true; @@ -44,7 +43,7 @@ public class HFileContextBuilder { /** Whether tags to be compressed or not **/ private boolean compressTags = false; /** the checksum type **/ - private ChecksumType checksumType = DEFAULT_CHECKSUM_TYPE; + private ChecksumType checksumType = ChecksumType.getDefaultChecksumType(); /** the number of bytes per checksum value **/ private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Checksum.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Checksum.java new file mode 100644 index 0000000..fbcfffd --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Checksum.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Interface for processing checksums + */ +@InterfaceAudience.Private +public interface Checksum { + + void init(ChecksumType type, int bytesPerChecksum) throws IOException; + + void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums); + + void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums, String fileName, long basePos) + throws IOException; + +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java deleted file mode 100644 index e27915a..0000000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Copyright The Apache Software Foundation - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.util; - -import java.io.IOException; -import java.lang.ClassNotFoundException; -import java.util.zip.Checksum; -import java.lang.reflect.Constructor; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -/** - * Utility class that is used to generate a Checksum object. - * The Checksum implementation is pluggable and an application - * can specify their own class that implements their own - * Checksum algorithm. - */ -@InterfaceAudience.Private -public class ChecksumFactory { - - static private final Class[] EMPTY_ARRAY = new Class[]{}; - - /** - * Create a new instance of a Checksum object. - * @return The newly created Checksum object - */ - static public Checksum newInstance(String className) throws IOException { - try { - Class clazz = getClassByName(className); - return (Checksum)newInstance(clazz); - } catch (ClassNotFoundException e) { - throw new IOException(e); - } - } - - /** - * Returns a Constructor that can be used to create a Checksum object. - * @param className classname for which an constructor is created - * @return a new Constructor object - */ - static public Constructor newConstructor(String className) - throws IOException { - try { - Class clazz = getClassByName(className); - Constructor ctor = clazz.getDeclaredConstructor(EMPTY_ARRAY); - ctor.setAccessible(true); - return ctor; - } catch (ClassNotFoundException e) { - throw new IOException(e); - } catch (java.lang.NoSuchMethodException e) { - throw new IOException(e); - } - } - - /** Create an object for the given class and initialize it from conf - * - * @param theClass class of which an object is created - * @return a new object - */ - static private T newInstance(Class theClass) { - T result; - try { - Constructor ctor = theClass.getDeclaredConstructor(EMPTY_ARRAY); - ctor.setAccessible(true); - result = ctor.newInstance(); - } catch (Exception e) { - throw new RuntimeException(e); - } - return result; - } - - /** - * Load a class by name. - * @param name the class name. - * @return the class object. - * @throws ClassNotFoundException if the class is not found. 
- */ - static private Class getClassByName(String name) - throws ClassNotFoundException { - ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); - return Class.forName(name, true, classLoader); - } -} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java index 6c3aef9..c5e6114 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java @@ -18,12 +18,6 @@ package org.apache.hadoop.hbase.util; -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.util.zip.Checksum; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -40,112 +34,36 @@ public enum ChecksumType { public String getName() { return "NULL"; } - @Override - public void initialize() { - // do nothing - } - @Override - public Checksum getChecksumObject() throws IOException { - return null; // checksums not used - } }, CRC32((byte)1) { - private transient Constructor ctor; - @Override public String getName() { return "CRC32"; } - - @Override - public void initialize() { - final String PURECRC32 = "org.apache.hadoop.util.PureJavaCrc32"; - final String JDKCRC = "java.util.zip.CRC32"; - LOG = LogFactory.getLog(ChecksumType.class); - - // check if hadoop library is available - try { - ctor = ChecksumFactory.newConstructor(PURECRC32); - LOG.info("Checksum using " + PURECRC32); - } catch (Exception e) { - LOG.trace(PURECRC32 + " not available."); - } - try { - // The default checksum class name is java.util.zip.CRC32. - // This is available on all JVMs. 
- if (ctor == null) { - ctor = ChecksumFactory.newConstructor(JDKCRC); - LOG.info("Checksum can use " + JDKCRC); - } - } catch (Exception e) { - LOG.trace(JDKCRC + " not available."); - } - } - - @Override - public Checksum getChecksumObject() throws IOException { - if (ctor == null) { - throw new IOException("Bad constructor for " + getName()); - } - try { - return (Checksum)ctor.newInstance(); - } catch (Exception e) { - throw new IOException(e); - } - } }, CRC32C((byte)2) { - private transient Constructor ctor; - @Override public String getName() { return "CRC32C"; } - - @Override - public void initialize() { - final String PURECRC32C = "org.apache.hadoop.util.PureJavaCrc32C"; - LOG = LogFactory.getLog(ChecksumType.class); - try { - ctor = ChecksumFactory.newConstructor(PURECRC32C); - LOG.info("Checksum can use " + PURECRC32C); - } catch (Exception e) { - LOG.trace(PURECRC32C + " not available."); - } - } - - @Override - public Checksum getChecksumObject() throws IOException { - if (ctor == null) { - throw new IOException("Bad constructor for " + getName()); - } - try { - return (Checksum)ctor.newInstance(); - } catch (Exception e) { - throw new IOException(e); - } - } }; private final byte code; - protected Log LOG; - /** initializes the relevant checksum class object */ - abstract void initialize(); + public static ChecksumType getDefaultChecksumType() { + // Preserve the default checksum type for <= 0.98 + return ChecksumType.CRC32; + } /** returns the name of this checksum type */ public abstract String getName(); private ChecksumType(final byte c) { this.code = c; - initialize(); } - /** returns a object that can be used to generate/validate checksums */ - public abstract Checksum getChecksumObject() throws IOException; - public byte getCode() { return this.code; } diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index b4c225d..f9dfd11 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -1154,7 +1154,8 @@ possible configurations would overwhelm and obscure the important. CRC32 Name of an algorithm that is used to compute checksums. Possible values - are NULL, CRC32, CRC32C. + are NULL, CRC32, CRC32C. The default is CRC32. Set to CRC32C for a potential + decrease in CPU utilization. diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index c71694b..bbca053 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -116,6 +116,11 @@ org.apache.commons commons-math + + + org.apache.hbase + hbase-common + diff --git a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java new file mode 100644 index 0000000..88d82df --- /dev/null +++ b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java @@ -0,0 +1,94 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.lang.ClassNotFoundException; +import java.util.zip.Checksum; +import java.lang.reflect.Constructor; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class ChecksumFactory { + + static private final Class[] EMPTY_ARRAY = new Class[]{}; + + /** + * Create a new instance of a Checksum object. + * @return The newly created Checksum object + */ + static public Checksum newInstance(String className) throws IOException { + try { + Class clazz = getClassByName(className); + return (Checksum)newInstance(clazz); + } catch (ClassNotFoundException e) { + throw new IOException(e); + } + } + + /** + * Returns a Constructor that can be used to create a Checksum object. + * @param className classname for which an constructor is created + * @return a new Constructor object + */ + static public Constructor newConstructor(String className) + throws IOException { + try { + Class clazz = getClassByName(className); + Constructor ctor = clazz.getDeclaredConstructor(EMPTY_ARRAY); + ctor.setAccessible(true); + return ctor; + } catch (ClassNotFoundException e) { + throw new IOException(e); + } catch (java.lang.NoSuchMethodException e) { + throw new IOException(e); + } + } + + /** Create an object for the given class and initialize it from conf + * + * @param theClass class of which an object is created + * @return a new object + */ + static private T newInstance(Class theClass) { + T result; + try { + Constructor ctor = theClass.getDeclaredConstructor(EMPTY_ARRAY); + ctor.setAccessible(true); + result = ctor.newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return result; + } + + /** + * Load a class by name. + * @param name the class name. + * @return the class object. + * @throws ClassNotFoundException if the class is not found. + */ + static private Class getClassByName(String name) + throws ClassNotFoundException { + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + return Class.forName(name, true, classLoader); + } +} diff --git a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java new file mode 100644 index 0000000..d36fe7e --- /dev/null +++ b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.nio.ByteBuffer; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.ChecksumException; +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class ChecksumImpl implements Checksum { + final static Log LOG = LogFactory.getLog(ChecksumImpl.class); + final static String PURECRC32 = "org.apache.hadoop.util.PureJavaCrc32"; + final static String PURECRC32C = "org.apache.hadoop.util.PureJavaCrc32C"; + final static String JDKCRC = "java.util.zip.CRC32"; + + private java.util.zip.Checksum cksum = null; + private int bytesPerChecksum; + + @Override + public void init(ChecksumType type, int bytesPerChecksum) throws IOException { + this.bytesPerChecksum = bytesPerChecksum; + Constructor ctor = null; + switch (type) { + case CRC32: + // check if hadoop library is available + try { + ctor = ChecksumFactory.newConstructor(PURECRC32); + if (LOG.isTraceEnabled()) { + LOG.trace("Checksum using " + PURECRC32); + } + } catch (Exception e) { + // Ignore + } + if (ctor == null) { + try { + // This is available on all JVMs + ctor = ChecksumFactory.newConstructor(JDKCRC); + if (LOG.isTraceEnabled()) { + LOG.trace("Checksum using " + JDKCRC); + } + } catch (Exception e) { + throw new IOException("Neither " + PURECRC32 + " nor " + JDKCRC + " are available"); + } + } + try { + cksum = (java.util.zip.Checksum)ctor.newInstance(); + } catch (Exception e) { + throw new IOException(e); + } + break; + case CRC32C: + // check if hadoop library is available + try { + ctor = ChecksumFactory.newConstructor(PURECRC32C); + if (LOG.isTraceEnabled()) { + LOG.trace("Checksum using " + PURECRC32C); + } + } catch (Exception e) { + throw new IOException(PURECRC32C + " not available"); + } + try { + cksum = (java.util.zip.Checksum)ctor.newInstance(); + } catch (Exception e) { + throw new IOException(e); + } + break; + case NULL: + break; + default: + LOG.warn("Unhandled CRC type '" + type + "'"); + break; + } + } + + @Override + public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) { + if (cksum == null) { + return; + } + if (data.hasArray() && checksums.hasArray()) { + calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), + data.remaining(), checksums.array(), checksums.arrayOffset() + checksums.position()); + return; + } + data.mark(); + checksums.mark(); + try { + byte[] buf = new byte[bytesPerChecksum]; + while (data.remaining() > 0) { + int n = Math.min(data.remaining(), bytesPerChecksum); + data.get(buf, 0, n); + cksum.reset(); + cksum.update(buf, 0, n); + checksums.putInt((int)cksum.getValue()); + } + } finally { + data.reset(); + checksums.reset(); + } + } + + /** + * Implementation of chunked calculation specifically on byte arrays. This + * is to avoid the copy when dealing with ByteBuffers that have array backing. 
+ */ + public void calculateChunkedSums(byte[] data, int dataOffset, int dataLength, byte[] sums, + int sumsOffset) { + int remaining = dataLength; + while (remaining > 0) { + int n = Math.min(remaining, bytesPerChecksum); + cksum.reset(); + cksum.update(data, dataOffset, n); + dataOffset += n; + remaining -= n; + long calculated = cksum.getValue(); + sums[sumsOffset++] = (byte)(calculated >> 24); + sums[sumsOffset++] = (byte)(calculated >> 16); + sums[sumsOffset++] = (byte)(calculated >> 8); + sums[sumsOffset++] = (byte)(calculated); + } + } + + @Override + public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums, + String fileName, long basePos) throws IOException { + if (cksum == null) { + return; + } + if (data.hasArray() && checksums.hasArray()) { + verifyChunkedSums( + data.array(), data.arrayOffset() + data.position(), data.remaining(), + checksums.array(), checksums.arrayOffset() + checksums.position(), + fileName, basePos); + return; + } + + int startDataPos = data.position(); + data.mark(); + checksums.mark(); + try { + byte[] buf = new byte[bytesPerChecksum]; + byte[] sum = new byte[Bytes.SIZEOF_INT]; + while (data.remaining() > 0) { + int n = Math.min(data.remaining(), bytesPerChecksum); + checksums.get(sum); + data.get(buf, 0, n); + cksum.reset(); + cksum.update(buf, 0, n); + int calculated = (int)cksum.getValue(); + int stored = (sum[0] << 24 & 0xff000000) | + (sum[1] << 16 & 0xff0000) | + (sum[2] << 8 & 0xff00) | + sum[3] & 0xff; + if (calculated != stored) { + long errPos = basePos + data.position() - startDataPos - n; + throw new ChecksumException( + "Checksum error: "+ fileName + " at "+ errPos + + " exp: " + stored + " got: " + calculated, errPos); + } + } + } finally { + data.reset(); + checksums.reset(); + } + } + + /** + * Implementation of chunked verification specifically on byte arrays. This + * is to avoid the copy when dealing with ByteBuffers that have array backing. + */ + private void verifyChunkedSums( + byte[] data, int dataOff, int dataLen, + byte[] checksums, int checksumsOff, String fileName, + long basePos) throws IOException { + int remaining = dataLen; + int dataPos = 0; + while (remaining > 0) { + int n = Math.min(remaining, bytesPerChecksum); + cksum.reset(); + cksum.update(data, dataOff + dataPos, n); + dataPos += n; + remaining -= n; + int calculated = (int)cksum.getValue(); + int stored = (checksums[checksumsOff] << 24 & 0xff000000) | + (checksums[checksumsOff + 1] << 16 & 0xff0000) | + (checksums[checksumsOff + 2] << 8 & 0xff00) | + checksums[checksumsOff + 3] & 0xff; + checksumsOff += 4; + if (calculated != stored) { + long errPos = basePos + dataPos - n; + throw new ChecksumException( + "Checksum error: "+ fileName + " at "+ errPos + + " exp: " + stored + " got: " + calculated, errPos); + } + } + } +} \ No newline at end of file diff --git a/hbase-hadoop1-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum b/hbase-hadoop1-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum new file mode 100644 index 0000000..830fae9 --- /dev/null +++ b/hbase-hadoop1-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +org.apache.hadoop.hbase.util.ChecksumImpl diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java new file mode 100644 index 0000000..488e45a --- /dev/null +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/util/ChecksumImpl.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.util.DataChecksum; + +@InterfaceAudience.Private +public class ChecksumImpl implements Checksum { + private static final Log LOG = LogFactory.getLog(ChecksumImpl.class); + + private DataChecksum cksum = null; + + @Override + public void init(ChecksumType type, int bytesPerChecksum) { + cksum = DataChecksum.newDataChecksum(getType(type), bytesPerChecksum); + } + + private static DataChecksum.Type getType(ChecksumType type) { + switch (type) { + case CRC32: + return DataChecksum.Type.CRC32; + case CRC32C: + return DataChecksum.Type.CRC32C; + case NULL: + return DataChecksum.Type.NULL; + default: + LOG.warn("Unhandled CRC type '" + type + "'"); + return DataChecksum.Type.NULL; + } + } + + @Override + public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) { + cksum.calculateChunkedSums(data, checksums); + } + + @Override + public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums, String fileName, + long basePos) throws IOException { + cksum.verifyChunkedSums(data, checksums, fileName, basePos); + } + +} diff --git a/hbase-hadoop2-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum b/hbase-hadoop2-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum new file mode 100644 index 0000000..830fae9 --- /dev/null +++ b/hbase-hadoop2-compat/src/main/resources/META-INF/services/org.apache.hadoop.hbase.util.Checksum @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# 
or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +org.apache.hadoop.hbase.util.ChecksumImpl diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java index 3282213..aea8311 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java @@ -20,16 +20,27 @@ package org.apache.hadoop.hbase.io.hfile; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.zip.Checksum; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.CompatibilityFactory; +import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Checksum; import org.apache.hadoop.hbase.util.ChecksumType; +import org.apache.hadoop.hbase.util.Pair; + +import com.google.common.cache.CacheBuilder; /** * Utility methods to compute and validate checksums. 
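The two provider-configuration files above (one per compat module) are what let the common code pick up the right ChecksumImpl at runtime: the rewritten ChecksumUtil asks CompatibilityFactory.getInstance(Checksum.class), and that factory is backed by the JDK ServiceLoader mechanism those files drive. A rough sketch of that lookup follows; the class and method names are illustrative only, and the factory's actual internals may differ from this bare ServiceLoader call.

import java.util.Iterator;
import java.util.ServiceLoader;
import org.apache.hadoop.hbase.util.Checksum;

public final class ChecksumLookupSketch {
  private ChecksumLookupSketch() {}

  public static Checksum load() {
    // ServiceLoader scans every META-INF/services/org.apache.hadoop.hbase.util.Checksum
    // on the classpath; whichever compat jar is present contributes its ChecksumImpl.
    Iterator<Checksum> it = ServiceLoader.load(Checksum.class).iterator();
    if (!it.hasNext()) {
      throw new IllegalStateException("No Checksum implementation found on the classpath");
    }
    return it.next();
  }
}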
*/ +@InterfaceAudience.Private public class ChecksumUtil { + public static final Log LOG = LogFactory.getLog(ChecksumUtil.class); /** This is used to reserve space in a byte buffer */ private static byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE]; @@ -43,6 +54,60 @@ public class ChecksumUtil { */ private static boolean generateExceptions = false; + // Cache Checksum instances to avoid CompatibilityFactory and ServiceLoader overheads + + static abstract class Pool { + + private ConcurrentMap> cache; + + public Pool() { + cache = CacheBuilder.newBuilder() + .expireAfterAccess(10, TimeUnit.MINUTES) + .>build() + .asMap(); + } + + public abstract V make(K key) throws IOException; + + public V get(K key) throws IOException { + ConcurrentLinkedQueue list = cache.get(key); + if (list == null) { + list = new ConcurrentLinkedQueue(); + ConcurrentLinkedQueue old = cache.putIfAbsent(key, list); + if (old != null) { + list = old; + } + } + V obj = list.poll(); + if (obj == null) { + obj = make(key); + } + return obj; + } + + public void put(K key, V obj) { + ConcurrentLinkedQueue list = cache.get(key); + if (list == null) { + list = new ConcurrentLinkedQueue(); + ConcurrentLinkedQueue old = cache.putIfAbsent(key, list); + if (old != null) { + list = old; + } + } + list.add(obj); + } + } + + static final Pool, Checksum> pool = + new Pool, Checksum>() { + @Override + public Checksum make(Pair key) throws IOException { + Checksum sum = CompatibilityFactory.getInstance(Checksum.class); + sum.init(key.getFirst(), key.getSecond()); + return sum; + } + }; + /** * Generates a checksum for all the data in indata. The checksum is * written to outdata. @@ -57,32 +122,23 @@ public class ChecksumUtil { * @param checksumType type of checksum * @param bytesPerChecksum number of bytes per checksum value */ - static void generateChecksums(byte[] indata, - int startOffset, int endOffset, - byte[] outdata, int outOffset, - ChecksumType checksumType, + static void generateChecksums(byte[] indata, int startOffset, int endOffset, + byte[] outdata, int outOffset, ChecksumType checksumType, int bytesPerChecksum) throws IOException { if (checksumType == ChecksumType.NULL) { - return; // No checkums for this block. + return; // No checksum for this block. } - Checksum checksum = checksumType.getChecksumObject(); - int bytesLeft = endOffset - startOffset; - int chunkNum = 0; - - while (bytesLeft > 0) { - // generate the checksum for one chunk - checksum.reset(); - int count = Math.min(bytesLeft, bytesPerChecksum); - checksum.update(indata, startOffset, count); - - // write the checksum value to the output buffer. - int cksumValue = (int)checksum.getValue(); - outOffset = Bytes.putInt(outdata, outOffset, cksumValue); - chunkNum++; - startOffset += count; - bytesLeft -= count; + Pair cacheKey = + new Pair(checksumType, bytesPerChecksum); + Checksum checksum = pool.get(cacheKey); + try { + checksum.calculateChunkedSums( + ByteBuffer.wrap(indata, startOffset, endOffset - startOffset), + ByteBuffer.wrap(outdata, outOffset, outdata.length - outOffset)); + } finally { + pool.put(cacheKey, checksum); } } @@ -95,7 +151,7 @@ public class ChecksumUtil { * The header is extracted from the specified HFileBlock while the * data-to-be-verified is extracted from 'data'. 
*/ - static boolean validateBlockChecksum(Path path, HFileBlock block, + static boolean validateBlockChecksum(Path path, HFileBlock block, byte[] data, int hdrSize) throws IOException { // If this is an older version of the block that does not have @@ -112,63 +168,41 @@ public class ChecksumUtil { // set in the HFileBlock header. A ChecksumType.NULL indicates that // the caller is not interested in validating checksums, so we // always return true. - ChecksumType cktype = ChecksumType.codeToType(block.getChecksumType()); - if (cktype == ChecksumType.NULL) { - return true; // No checkums validations needed for this block. + ChecksumType checksumType = ChecksumType.codeToType(block.getChecksumType()); + if (checksumType == ChecksumType.NULL) { + return true; // No checksum validations needed for this block. } - Checksum checksumObject = cktype.getChecksumObject(); - checksumObject.reset(); // read in the stored value of the checksum size from the header. int bytesPerChecksum = block.getBytesPerChecksum(); - // bytesPerChecksum is always larger than the size of the header - if (bytesPerChecksum < hdrSize) { - String msg = "Unsupported value of bytesPerChecksum. " + - " Minimum is " + hdrSize + - " but the configured value is " + bytesPerChecksum; - HFile.LOG.warn(msg); - return false; // cannot happen case, unable to verify checksum + int sizeWithHeader = block.getOnDiskDataSizeWithHeader(); + if (LOG.isTraceEnabled()) { + LOG.trace("length of data = " + data.length + + " OnDiskDataSizeWithHeader = " + sizeWithHeader + + " checksum type = " + checksumType.getName() + + " file =" + path.toString() + + " header size = " + hdrSize + + " bytesPerChecksum = " + bytesPerChecksum); } - // Extract the header and compute checksum for the header. - ByteBuffer hdr = block.getBufferWithHeader(); - checksumObject.update(hdr.array(), hdr.arrayOffset(), hdrSize); - - int off = hdrSize; - int consumed = hdrSize; - int bytesLeft = block.getOnDiskDataSizeWithHeader() - off; - int cksumOffset = block.getOnDiskDataSizeWithHeader(); - - // validate each chunk - while (bytesLeft > 0) { - int thisChunkSize = bytesPerChecksum - consumed; - int count = Math.min(bytesLeft, thisChunkSize); - checksumObject.update(data, off, count); - - int storedChecksum = Bytes.toInt(data, cksumOffset); - if (storedChecksum != (int)checksumObject.getValue()) { - String msg = "File " + path + - " Stored checksum value of " + storedChecksum + - " at offset " + cksumOffset + - " does not match computed checksum " + - checksumObject.getValue() + - ", total data size " + data.length + - " Checksum data range offset " + off + " len " + count + - HFileBlock.toStringHeader(block.getBufferReadOnly()); - HFile.LOG.warn(msg); - if (generateExceptions) { - throw new IOException(msg); // this is only for unit tests - } else { - return false; // checksum validation failure - } + + Pair cacheKey = + new Pair(checksumType, bytesPerChecksum); + Checksum checksum = pool.get(cacheKey); + try { + checksum.verifyChunkedSums(ByteBuffer.wrap(data, 0, sizeWithHeader), + ByteBuffer.wrap(data, sizeWithHeader, data.length - sizeWithHeader), + path.toString(), 0); + return true; // checksum is valid + } catch (IOException e) { + if (generateExceptions) { + throw e; // this is only for unit tests + } else { + return false; // checksum validation failure } - cksumOffset += HFileBlock.CHECKSUM_SIZE; - bytesLeft -= count; - off += count; - consumed = 0; - checksumObject.reset(); + } finally { + pool.put(cacheKey, checksum); } - return true; // checksum is valid } 
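For orientation, the rewritten generate and verify paths assume the same on-disk layout as before: the block's header-plus-data region of onDiskDataSizeWithHeader bytes (the header is checksummed too) is immediately followed by one four-byte sum, HFileBlock.CHECKSUM_SIZE, per bytesPerChecksum chunk; validateBlockChecksum simply wraps those two regions of the same byte array and hands them to verifyChunkedSums. A minimal sketch of the arithmetic, with the class and method names being illustrative only:

public final class ChecksumLayoutSketch {
  private ChecksumLayoutSketch() {}

  /**
   * Number of trailing checksum bytes for a block whose header-plus-data region
   * is onDiskDataSizeWithHeader bytes long; the constant 4 mirrors
   * HFileBlock.CHECKSUM_SIZE (one int written per bytesPerChecksum chunk).
   */
  public static int numChecksumBytes(int onDiskDataSizeWithHeader, int bytesPerChecksum) {
    int chunks = (onDiskDataSizeWithHeader + bytesPerChecksum - 1) / bytesPerChecksum;
    return chunks * 4;
  }
}

With the default 16 KB chunk size, a 33 KB header-plus-data region needs three chunks, so 12 bytes of checksums trail the block. The Pair-keyed pool above exists because resolving a Checksum goes through CompatibilityFactory and ServiceLoader; recycling initialized instances per (type, bytesPerChecksum) keeps that lookup off the per-block read and write path, and the Guava cache's ten-minute expireAfterAccess bounds idle entries.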
/** @@ -226,5 +260,4 @@ public class ChecksumUtil { public static void generateExceptionForChecksumFailureForTest(boolean value) { generateExceptions = value; } -} - +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index fbf28ff..c64bf74 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; import org.apache.hadoop.hbase.protobuf.generated.HFileProtos; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.Writable; @@ -181,7 +180,6 @@ public class HFile { * The number of bytes per checksum. */ public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024; - public static final ChecksumType DEFAULT_CHECKSUM_TYPE = ChecksumType.CRC32; // For measuring number of checksum failures static final AtomicLong checksumFailures = new AtomicLong(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 30af403..ac11614 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -419,7 +419,7 @@ public class HStore implements Store { public static ChecksumType getChecksumType(Configuration conf) { String checksumName = conf.get(HConstants.CHECKSUM_TYPE_NAME); if (checksumName == null) { - return HFile.DEFAULT_CHECKSUM_TYPE; + return ChecksumType.getDefaultChecksumType(); } else { return ChecksumType.nameToType(checksumName); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java index d81b16d..4ebc053 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java @@ -22,12 +22,18 @@ package org.apache.hadoop.hbase.io.hfile; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.GZ; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -48,10 +54,6 @@ import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestChecksum { - // change this value to activate more logs - private static final boolean detailedLogging = true; - private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true }; - private static final Log LOG = LogFactory.getLog(TestHFileBlock.class); static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { @@ -71,6 +73,73 @@ public class TestChecksum { hfs = 
(HFileSystem)fs; } + @Test + public void testNewBlocksHaveDefaultChecksum() throws IOException { + Path path = new Path(TEST_UTIL.getDataTestDir(), "default_checksum"); + FSDataOutputStream os = fs.create(path); + HFileContext meta = new HFileContextBuilder().build(); + HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + for (int i = 0; i < 1000; ++i) + dos.writeInt(i); + hbw.writeHeaderAndData(os); + int totalSize = hbw.getOnDiskSizeWithHeader(); + os.close(); + + // Use hbase checksums. + assertEquals(true, hfs.useHBaseChecksum()); + + FSDataInputStreamWrapper is = new FSDataInputStreamWrapper(fs, path); + meta = new HFileContextBuilder().withHBaseCheckSum(true).build(); + HFileBlock.FSReader hbr = new HFileBlock.FSReaderV2( + is, totalSize, (HFileSystem) fs, path, meta); + HFileBlock b = hbr.readBlockData(0, -1, -1, false); + assertEquals(b.getChecksumType(), ChecksumType.getDefaultChecksumType().getCode()); + } + + /** + * Test all checksum types by writing and reading back blocks. + */ + @Test + public void testAllChecksumTypes() throws IOException { + List cktypes = new ArrayList(Arrays.asList(ChecksumType.values())); + for (Iterator itr = cktypes.iterator(); itr.hasNext(); ) { + ChecksumType cktype = itr.next(); + Path path = new Path(TEST_UTIL.getDataTestDir(), "checksum" + cktype.getName()); + FSDataOutputStream os = fs.create(path); + HFileContext meta = new HFileContextBuilder() + .withChecksumType(cktype).build(); + HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); + DataOutputStream dos = hbw.startWriting(BlockType.DATA); + for (int i = 0; i < 1000; ++i) + dos.writeInt(i); + hbw.writeHeaderAndData(os); + int totalSize = hbw.getOnDiskSizeWithHeader(); + os.close(); + + // Use hbase checksums. 
+ assertEquals(true, hfs.useHBaseChecksum()); + + FSDataInputStreamWrapper is = new FSDataInputStreamWrapper(fs, path); + meta = new HFileContextBuilder().withHBaseCheckSum(true).build(); + HFileBlock.FSReader hbr = new HFileBlock.FSReaderV2( + is, totalSize, (HFileSystem) fs, path, meta); + HFileBlock b = hbr.readBlockData(0, -1, -1, false); + ByteBuffer data = b.getBufferWithoutHeader(); + for (int i = 0; i < 1000; i++) { + assertEquals(i, data.getInt()); + } + boolean exception_thrown = false; + try { + data.getInt(); + } catch (BufferUnderflowException e) { + exception_thrown = true; + } + assertTrue(exception_thrown); + assertEquals(0, HFile.getChecksumFailuresCount()); + } + } + /** * Introduce checksum failures and check that we can still read * the data @@ -93,7 +162,6 @@ public class TestChecksum { .withCompression(algo) .withIncludesMvcc(true) .withIncludesTags(useTags) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); @@ -203,7 +271,6 @@ public class TestChecksum { .withIncludesTags(useTags) .withHBaseCheckSum(true) .withBytesPerCheckSum(bytesPerChecksum) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); @@ -261,16 +328,6 @@ public class TestChecksum { } } - /** - * Test to ensure that these is at least one valid checksum implementation - */ - @Test - public void testChecksumAlgorithm() throws IOException { - ChecksumType type = ChecksumType.CRC32; - assertEquals(ChecksumType.nameToType(type.getName()), type); - assertEquals(ChecksumType.valueOf(type.toString()), type); - } - private void validateData(DataInputStream in) throws IOException { // validate data for (int i = 0; i < 1234; i++) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 492c7f4..4320ad1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -222,7 +222,6 @@ public class TestHFileBlock { .withIncludesMvcc(includesMemstoreTS) .withIncludesTags(includesTag) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); DataOutputStream dos = hbw.startWriting(blockType); @@ -267,7 +266,8 @@ public class TestHFileBlock { final String correctTestBlockStr = "DATABLK*\\x00\\x00\\x00>\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF" + "\\xFF\\xFF\\xFF\\xFF" - + "\\x01\\x00\\x00@\\x00\\x00\\x00\\x00[" + + "\\x0" + ChecksumType.getDefaultChecksumType().getCode() + + "\\x00\\x00@\\x00\\x00\\x00\\x00[" // gzip-compressed block: http://www.gzip.org/zlib/rfc-gzip.html + "\\x1F\\x8B" // gzip magic signature + "\\x08" // Compression method: 8 = "deflate" @@ -312,7 +312,6 @@ public class TestHFileBlock { .withIncludesMvcc(includesMemstoreTS) .withIncludesTags(includesTag) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); @@ -395,7 +394,6 @@ public class TestHFileBlock { .withIncludesMvcc(includesMemstoreTS) .withIncludesTags(includesTag) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .build(); 
HFileBlock.Writer hbw = new HFileBlock.Writer(dataBlockEncoder, meta); @@ -822,7 +820,6 @@ public class TestHFileBlock { .withIncludesTags(includesTag) .withCompression(compressAlgo) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta); Map prevOffsetByType = new HashMap(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index ef2917e..c3272ef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -247,7 +247,6 @@ public class TestHFileBlockIndex { .withIncludesMvcc(includesMemstoreTS) .withIncludesTags(useTags) .withCompression(compr) - .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .build(); HFileBlock.Writer hbw = new HFileBlock.Writer(null, -- 2.2.2
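Taken together, the pieces above give a small pluggable API. The sketch below shows it end to end the way the patched code uses it: the algorithm is chosen the same way HStore.getChecksumType(conf) now does, the implementation is resolved through CompatibilityFactory as in ChecksumUtil, and sums are computed and verified over array-backed ByteBuffers. Buffer sizes, variable names, and the standalone main are editorial assumptions, not part of the patch.

import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Checksum;
import org.apache.hadoop.hbase.util.ChecksumType;

public final class ChecksumUsageSketch {
  public static void main(String[] args) throws Exception {
    // Pick the algorithm: hbase.hfile.checksum.algorithm if set, else the default (CRC32).
    Configuration conf = new Configuration();
    conf.set(HConstants.CHECKSUM_TYPE_NAME, "CRC32C"); // leave unset to keep CRC32
    String name = conf.get(HConstants.CHECKSUM_TYPE_NAME);
    ChecksumType type = (name == null)
        ? ChecksumType.getDefaultChecksumType() : ChecksumType.nameToType(name);

    // Resolve the hadoop1 or hadoop2 ChecksumImpl through the compat layer.
    Checksum checksum = CompatibilityFactory.getInstance(Checksum.class);
    int bytesPerChecksum = 16 * 1024; // HFile.DEFAULT_BYTES_PER_CHECKSUM
    checksum.init(type, bytesPerChecksum);

    // One 4-byte sum per 16 KB chunk: 64 KB of data needs 16 bytes of sums.
    ByteBuffer data = ByteBuffer.allocate(64 * 1024);
    ByteBuffer sums = ByteBuffer.allocate(16);
    checksum.calculateChunkedSums(data, sums);

    // Throws org.apache.hadoop.fs.ChecksumException (an IOException) on mismatch.
    checksum.verifyChunkedSums(data, sums, "sketch", 0);
  }
}

On hadoop2 this rides on org.apache.hadoop.util.DataChecksum, so CRC32C can use the native Hadoop library when it is loaded, which is the point of the backport; on hadoop1 it falls back to PureJavaCrc32 or java.util.zip.CRC32.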