From 75065dfd91ae4c455ec89ce8c5a1eaa8a5d1d57b Mon Sep 17 00:00:00 2001 From: Viraj Jasani Date: Thu, 23 Jan 2020 12:12:13 -0800 Subject: [PATCH] HBASE-23279 : Set ROW_INDEX_V1 as default DataBlock encoding --- .../org/apache/hadoop/hbase/HColumnDescriptor.java | 3 ++- .../client/ColumnFamilyDescriptorBuilder.java | 14 ++++++-------- .../hbase/io/encoding/DataBlockEncoding.java | 3 +++ .../apache/hadoop/hbase/io/hfile/HFileContext.java | 2 +- .../hadoop/hbase/io/hfile/HFileContextBuilder.java | 2 +- .../hbase/io/hfile/HFileDataBlockEncoderImpl.java | 4 ++-- .../hbase/io/hfile/bucket/BucketAllocator.java | 1 + .../hbase/mob/compactions/TestMobCompactor.java | 3 ++- 8 files changed, 18 insertions(+), 14 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index db2b4f5362..43cbab8324 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -80,7 +80,8 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable DataBlockEncoding.valueOf(n.toUpperCase()), DataBlockEncoding.NONE); + n -> DataBlockEncoding.valueOf(n.toUpperCase()), DataBlockEncoding + .DEFAULT_DATA_BLOCK_ENCODING); } /** @@ -807,7 +804,8 @@ public class ColumnFamilyDescriptorBuilder { * @return this (for chained invocation) */ public ModifyableColumnFamilyDescriptor setDataBlockEncoding(DataBlockEncoding type) { - return setValue(DATA_BLOCK_ENCODING_BYTES, type == null ? DataBlockEncoding.NONE.name() : type.name()); + return setValue(DATA_BLOCK_ENCODING_BYTES, type == null ? 
+ DataBlockEncoding.DEFAULT_DATA_BLOCK_ENCODING.name() : type.name()); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java index 335488b075..8a19b8d406 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java @@ -79,6 +79,9 @@ public enum DataBlockEncoding { this.encoderCls = encoderClsName; } + public static final DataBlockEncoding DEFAULT_DATA_BLOCK_ENCODING = + DataBlockEncoding.ROW_INDEX_V1; + /** * @return name converted to bytes. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java index ea4782d035..8a2d65bc9b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java @@ -63,7 +63,7 @@ public class HFileContext implements HeapSize, Cloneable { private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. 
*/ private int blocksize = HConstants.DEFAULT_BLOCKSIZE; - private DataBlockEncoding encoding = DataBlockEncoding.NONE; + private DataBlockEncoding encoding = DataBlockEncoding.DEFAULT_DATA_BLOCK_ENCODING; /** Encryption algorithm and key used */ private Encryption.Context cryptoContext = Encryption.Context.NONE; private long fileCreateTime; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java index a44f273c58..cab4285ce8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java @@ -49,7 +49,7 @@ public class HFileContextBuilder { private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. */ private int blocksize = HConstants.DEFAULT_BLOCKSIZE; - private DataBlockEncoding encoding = DataBlockEncoding.NONE; + private DataBlockEncoding encoding = DataBlockEncoding.DEFAULT_DATA_BLOCK_ENCODING; /** Crypto context */ private Encryption.Context cryptoContext = Encryption.Context.NONE; private long fileCreateTime = 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java index 347b1f3c59..8fa9f86514 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java @@ -42,7 +42,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder { * @param encoding What kind of data block encoding will be used. */ public HFileDataBlockEncoderImpl(DataBlockEncoding encoding) { - this.encoding = encoding != null ? encoding : DataBlockEncoding.NONE; + this.encoding = encoding != null ? 
encoding : DataBlockEncoding.DEFAULT_DATA_BLOCK_ENCODING; } public static HFileDataBlockEncoder createFromFileInfo( @@ -129,7 +129,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder { @Override public void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException { - if (this.encoding != null && this.encoding != DataBlockEncoding.NONE) { + if (this.encoding != DataBlockEncoding.NONE) { this.encoding.getEncoder().startBlockEncoding(encodingCtx, out); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java index 2883ff2f16..7784a2184d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java @@ -305,6 +305,7 @@ public final class BucketAllocator { BucketAllocator(long availableSpace, int[] bucketSizes) throws BucketAllocatorException { this.bucketSizes = bucketSizes == null ? 
DEFAULT_BUCKET_SIZES : bucketSizes; + LOG.info("BucketSizes during Bucket Allocation: {}", this.bucketSizes); Arrays.sort(this.bucketSizes); this.bigItemSize = Ints.max(this.bucketSizes); this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * (long) bigItemSize; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java index b8e3ce0d44..60405fc114 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java @@ -325,7 +325,8 @@ public class TestMobCompactor { * (cellNumPerRow * rowNumPerRegion - delCellNum), countMobCells(table)); // After the compaction, the files smaller than the mob compaction merge size // is merge to one file - assertEquals("After compaction: family1 mob file count", largeFilesCount + regionNum, + // After enabling ROW_INDEX_V1 as default encoding, expected file count is reduced by regionNum + assertEquals("After compaction: family1 mob file count", largeFilesCount, countFiles(tableName, true, family1)); assertEquals("After compaction: family2 mob file count", regionNum * count, countFiles(tableName, true, family2)); -- 2.17.2 (Apple Git-113)