From 31a12e77fdba3c293d92ae20608cbf957eebaa53 Mon Sep 17 00:00:00 2001
From: Viraj Jasani
Date: Mon, 18 Nov 2019 23:34:37 +0530
Subject: [PATCH] HBASE-23279 : ROW_INDEX_V1 as default DataBlock encoding

---
 .../hbase/client/ColumnFamilyDescriptorBuilder.java        | 8 +++++---
 .../org/apache/hadoop/hbase/io/hfile/HFileContext.java     | 2 +-
 .../apache/hadoop/hbase/io/hfile/HFileContextBuilder.java  | 2 +-
 .../hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java   | 4 ++--
 .../hadoop/hbase/io/hfile/bucket/BucketAllocator.java      | 1 +
 .../apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java    | 4 +++-
 6 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
index 0b78c9d4d5..c7e00624a4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
@@ -186,7 +186,8 @@ public class ColumnFamilyDescriptorBuilder {
   /**
    * Default data block encoding algorithm.
    */
-  public static final DataBlockEncoding DEFAULT_DATA_BLOCK_ENCODING = DataBlockEncoding.NONE;
+  public static final DataBlockEncoding DEFAULT_DATA_BLOCK_ENCODING =
+      DataBlockEncoding.ROW_INDEX_V1;
 
   /**
    * Default number of versions of a record to keep.
@@ -797,7 +798,7 @@ public class ColumnFamilyDescriptorBuilder {
     @Override
     public DataBlockEncoding getDataBlockEncoding() {
       return getStringOrDefault(DATA_BLOCK_ENCODING_BYTES,
-          n -> DataBlockEncoding.valueOf(n.toUpperCase()), DataBlockEncoding.NONE);
+          n -> DataBlockEncoding.valueOf(n.toUpperCase()), DataBlockEncoding.ROW_INDEX_V1);
     }
 
     /**
@@ -807,7 +808,8 @@ public class ColumnFamilyDescriptorBuilder {
      * @return this (for chained invocation)
      */
     public ModifyableColumnFamilyDescriptor setDataBlockEncoding(DataBlockEncoding type) {
-      return setValue(DATA_BLOCK_ENCODING_BYTES, type == null ? DataBlockEncoding.NONE.name() : type.name());
+      return setValue(DATA_BLOCK_ENCODING_BYTES, type == null ?
+          DataBlockEncoding.ROW_INDEX_V1.name() : type.name());
     }
 
     /**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index d606497e2f..65aa8538bd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -60,7 +60,7 @@ public class HFileContext implements HeapSize, Cloneable {
   private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
   /** Number of uncompressed bytes we allow per block. */
   private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
-  private DataBlockEncoding encoding = DataBlockEncoding.NONE;
+  private DataBlockEncoding encoding = DataBlockEncoding.ROW_INDEX_V1;
   /** Encryption algorithm and key used */
   private Encryption.Context cryptoContext = Encryption.Context.NONE;
   private long fileCreateTime;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
index 5fa56264f3..d8e9279ca6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
@@ -48,7 +48,7 @@ public class HFileContextBuilder {
   private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
   /** Number of uncompressed bytes we allow per block. */
   private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
-  private DataBlockEncoding encoding = DataBlockEncoding.NONE;
+  private DataBlockEncoding encoding = DataBlockEncoding.ROW_INDEX_V1;
   /** Crypto context */
   private Encryption.Context cryptoContext = Encryption.Context.NONE;
   private long fileCreateTime = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
index 347b1f3c59..dca596d691 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
@@ -42,7 +42,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder {
    * @param encoding What kind of data block encoding will be used.
    */
   public HFileDataBlockEncoderImpl(DataBlockEncoding encoding) {
-    this.encoding = encoding != null ? encoding : DataBlockEncoding.NONE;
+    this.encoding = encoding != null ? encoding : DataBlockEncoding.ROW_INDEX_V1;
   }
 
   public static HFileDataBlockEncoder createFromFileInfo(
@@ -129,7 +129,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder {
   @Override
   public void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)
       throws IOException {
-    if (this.encoding != null && this.encoding != DataBlockEncoding.NONE) {
+    if (this.encoding != DataBlockEncoding.NONE) {
       this.encoding.getEncoder().startBlockEncoding(encodingCtx, out);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index 2883ff2f16..7784a2184d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -305,6 +305,7 @@ public final class BucketAllocator {
   BucketAllocator(long availableSpace, int[] bucketSizes)
      throws BucketAllocatorException {
     this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes;
+    LOG.info("BucketSizes during Bucket Allocation: {}", this.bucketSizes);
     Arrays.sort(this.bucketSizes);
     this.bigItemSize = Ints.max(this.bucketSizes);
     this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * (long) bigItemSize;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index 4f4d36b2bc..44789669d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -127,7 +128,8 @@ public class TestHFileWriterV3 {
     HFileContext context = new HFileContextBuilder()
       .withBlockSize(4096)
       .withIncludesTags(useTags)
-      .withCompression(compressAlgo).build();
+      .withCompression(compressAlgo)
+      .withDataBlockEncoding(DataBlockEncoding.NONE).build();
     CacheConfig cacheConfig = new CacheConfig(conf);
     HFile.Writer writer = new HFile.WriterFactory(conf, cacheConfig)
       .withPath(fs, hfilePath)
-- 
2.17.2 (Apple Git-113)
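
A minimal sketch of how the new default surfaces through the public client API once this change is applied; the class name DefaultEncodingCheck and the column family names are illustrative only and not part of the patch:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class DefaultEncodingCheck {
  public static void main(String[] args) {
    // Column family built without an explicit encoding: with this patch the
    // unset value resolves to ROW_INDEX_V1 rather than NONE.
    ColumnFamilyDescriptor cf =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build();
    System.out.println("Implicit default: " + cf.getDataBlockEncoding()); // ROW_INDEX_V1

    // Callers that relied on the old behaviour must now opt out explicitly.
    ColumnFamilyDescriptor unencoded = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("blobs"))
        .setDataBlockEncoding(DataBlockEncoding.NONE)
        .build();
    System.out.println("Explicit opt-out: " + unencoded.getDataBlockEncoding()); // NONE
  }
}

TestHFileWriterV3 above opts out the same way, via HFileContextBuilder#withDataBlockEncoding(DataBlockEncoding.NONE), so that it keeps exercising unencoded blocks.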