diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 7e63ad6..90a4e51 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -18,9 +18,14 @@ */ package org.apache.hadoop.hbase; -import com.google.common.base.Preconditions; -import com.google.protobuf.HBaseZeroCopyByteString; -import com.google.protobuf.InvalidProtocolBufferException; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,14 +42,9 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparable; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; +import com.google.common.base.Preconditions; +import com.google.protobuf.HBaseZeroCopyByteString; +import com.google.protobuf.InvalidProtocolBufferException; /** * An HColumnDescriptor contains information about a column family such as the @@ -194,9 +194,10 @@ public class HColumnDescriptor implements WritableComparable public static final boolean DEFAULT_EVICT_BLOCKS_ON_CLOSE = false; /** - * Default compress tags along with any type of DataBlockEncoding + * Default compress tags along with any type of DataBlockEncoding. 
+ * Disabled (set to false) by default as of 0.98.0