From 755a1ab19bef9bf85b3ef683266997ecfaa3d56d Mon Sep 17 00:00:00 2001
From: Geoffrey Jacoby
Date: Tue, 5 Nov 2019 14:30:53 -0800
Subject: [PATCH] HBASE-23251 - Add Column Family and Table Names to
 HFileContext and use in HFileWriterImpl logging

---
 .../hadoop/hbase/io/hfile/HFileContext.java   | 24 ++++++++++++++++---
 .../hbase/io/hfile/HFileContextBuilder.java   | 16 ++++++++++++-
 .../hbase/mapreduce/HFileOutputFormat2.java   |  4 +++-
 .../hadoop/hbase/io/hfile/HFileBlock.java     |  2 ++
 .../hbase/io/hfile/HFileWriterImpl.java       | 17 ++++++++++---
 .../hadoop/hbase/regionserver/HStore.java     |  3 +++
 .../hadoop/hbase/regionserver/TestHStore.java | 11 +++++++++
 .../hadoop/hbase/util/HFileTestUtil.java      |  1 +
 8 files changed, 70 insertions(+), 8 deletions(-)

diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index 65649f4405..2aaff23fc0 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -63,6 +63,8 @@ public class HFileContext implements HeapSize, Cloneable {
   private Encryption.Context cryptoContext = Encryption.Context.NONE;
   private long fileCreateTime;
   private String hfileName;
+  private byte[] columnFamily;
+  private byte[] tableName;
 
   //Empty constructor. Go with setters
   public HFileContext() {
@@ -85,12 +87,15 @@ public class HFileContext implements HeapSize, Cloneable {
     this.cryptoContext = context.cryptoContext;
     this.fileCreateTime = context.fileCreateTime;
     this.hfileName = context.hfileName;
+    this.columnFamily = context.columnFamily;
+    this.tableName = context.tableName;
   }
 
   HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
-               Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
-               int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
-               Encryption.Context cryptoContext, long fileCreateTime, String hfileName) {
+      Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
+      int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
+      Encryption.Context cryptoContext, long fileCreateTime, String hfileName,
+      byte[] columnFamily, byte[] tableName) {
     this.usesHBaseChecksum = useHBaseChecksum;
     this.includesMvcc = includesMvcc;
     this.includesTags = includesTags;
@@ -105,6 +110,8 @@ public class HFileContext implements HeapSize, Cloneable {
     this.cryptoContext = cryptoContext;
     this.fileCreateTime = fileCreateTime;
     this.hfileName = hfileName;
+    this.columnFamily = columnFamily;
+    this.tableName = tableName;
   }
 
   /**
@@ -192,6 +199,9 @@ public class HFileContext implements HeapSize, Cloneable {
     return this.hfileName;
   }
 
+  public byte[] getColumnFamily() { return this.columnFamily; }
+
+  public byte[] getTableName() { return this.tableName; }
   /**
    * HeapSize implementation. NOTE : The heap size should be altered when new state variable are
    * added.
@@ -233,6 +243,14 @@ public class HFileContext implements HeapSize, Cloneable {
       sb.append(", name=");
       sb.append(hfileName);
     }
+    if (tableName != null) {
+      sb.append(", tableName=");
+      sb.append(Bytes.toString(tableName));
+    }
+    if (columnFamily != null) {
+      sb.append(", columnFamily=");
+      sb.append(Bytes.toString(columnFamily));
+    }
     sb.append("]");
     return sb.toString();
   }
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
index 24e23e81a2..5fa56264f3 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
@@ -54,6 +54,8 @@ public class HFileContextBuilder {
   private long fileCreateTime = 0;
 
   private String hfileName = null;
+  private byte[] columnFamily = null;
+  private byte[] tableName = null;
 
   public HFileContextBuilder() {}
 
@@ -73,6 +75,8 @@ public class HFileContextBuilder {
     this.cryptoContext = hfc.getEncryptionContext();
     this.fileCreateTime = hfc.getFileCreateTime();
     this.hfileName = hfc.getHFileName();
+    this.columnFamily = hfc.getColumnFamily();
+    this.tableName = hfc.getTableName();
   }
 
   public HFileContextBuilder withHBaseCheckSum(boolean useHBaseCheckSum) {
@@ -135,9 +139,19 @@ public class HFileContextBuilder {
     return this;
   }
 
+  public HFileContextBuilder withColumnFamily(byte[] columnFamily) {
+    this.columnFamily = columnFamily;
+    return this;
+  }
+
+  public HFileContextBuilder withTableName(byte[] tableName) {
+    this.tableName = tableName;
+    return this;
+  }
+
   public HFileContext build() {
     return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression,
         compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext,
-        fileCreateTime, hfileName);
+        fileCreateTime, hfileName, columnFamily, tableName);
   }
 }
diff --git hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index b49903879e..35a905212b 100644
--- hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -412,7 +412,9 @@ public class HFileOutputFormat2
             .withCompression(compression)
             .withChecksumType(HStore.getChecksumType(conf))
             .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
-            .withBlockSize(blockSize);
+            .withBlockSize(blockSize)
+            .withColumnFamily(family)
+            .withTableName(tableName);
 
       if (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
         contextBuilder.withIncludesTags(true);
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index a723e524a7..ba524b14dd 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1267,6 +1267,8 @@ public class HFileBlock implements Cacheable {
         .withCompressTags(fileContext.isCompressTags())
         .withIncludesMvcc(fileContext.isIncludesMvcc())
         .withIncludesTags(fileContext.isIncludesTags())
+        .withColumnFamily(fileContext.getColumnFamily())
+        .withTableName(fileContext.getTableName())
         .build();
       // Build the HFileBlock.
       HFileBlockBuilder builder = new HFileBlockBuilder();
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index 93cca8bd36..55dc606f01 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -242,10 +242,9 @@ public class HFileWriterImpl implements HFile.Writer {
     }
     if (lastCell != null) {
       int keyComp = PrivateCellUtil.compareKeyIgnoresMvcc(comparator, lastCell, cell);
-
       if (keyComp > 0) {
-        throw new IOException("Added a key not lexically larger than"
-            + " previous. Current cell = " + cell + ", lastCell = " + lastCell);
+        String message = getLexicalErrorMessage(cell);
+        throw new IOException(message);
       } else if (keyComp == 0) {
         isDuplicateKey = true;
       }
@@ -253,6 +252,18 @@
     return isDuplicateKey;
   }
 
+  private String getLexicalErrorMessage(Cell cell) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("Added a key not lexically larger than previous. Current cell = ");
+    sb.append(cell);
+    sb.append(", lastCell = ");
+    sb.append(lastCell);
+    // file context includes HFile path and optionally table and CF of file being written
+    sb.append(", fileContext=");
+    sb.append(hFileContext);
+    return sb.toString();
+  }
+
   /** Checks the given value for validity. */
   protected void checkValue(final byte[] value, final int offset,
       final int length) throws IOException {
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index b007898e96..bb1bce3029 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -1163,6 +1163,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
         .withDataBlockEncoding(family.getDataBlockEncoding())
         .withEncryptionContext(cryptoContext)
         .withCreateTime(EnvironmentEdgeManager.currentTime())
+        .withColumnFamily(family.getName())
+        .withTableName(region.getTableDescriptor()
+            .getTableName().getName())
         .build();
     return hFileContext;
   }
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
index 21043491e0..db86a49f6c 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
@@ -165,6 +165,7 @@ public class TestHStore {
    */
   @Before
   public void setUp() throws IOException {
+    qualifiers.clear();
     qualifiers.add(qf1);
     qualifiers.add(qf3);
     qualifiers.add(qf5);
@@ -1704,6 +1705,16 @@ public class TestHStore {
     assertEquals(8192L, sizeStore.getRegionSize(regionInfo2).getSize());
   }
 
+  @Test
+  public void testHFileContextSetWithCFAndTable() throws Exception {
+    init(this.name.getMethodName());
+    StoreFileWriter writer = store.createWriterInTmp(10000L,
+        Compression.Algorithm.NONE, false, true, false, true);
+    HFileContext hFileContext = writer.getHFileWriter().getFileContext();
+    assertArrayEquals(family, hFileContext.getColumnFamily());
+    assertArrayEquals(table, hFileContext.getTableName());
+  }
+
   private HStoreFile mockStoreFileWithLength(long length) {
     HStoreFile sf = mock(HStoreFile.class);
     StoreFileReader sfr = mock(StoreFileReader.class);
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index bb4f602d76..117b869f70 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -120,6 +120,7 @@ public class HFileTestUtil {
     HFileContext meta = new HFileContextBuilder()
         .withIncludesTags(withTag)
         .withDataBlockEncoding(encoding)
+        .withColumnFamily(family)
         .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)
-- 
2.17.2 (Apple Git-113)
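
Usage sketch (editor's note, not part of the patch): a minimal example of how a caller
might attach the table and column family to an HFileContext through the new builder
methods, so that writer-side errors such as the "not lexically larger" IOException in
HFileWriterImpl identify the originating store. The class name and the table/family
values below are hypothetical and chosen only for illustration.

    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HFileContextTaggingExample {
      public static void main(String[] args) {
        // Hypothetical table and column family names, for illustration only.
        byte[] tableName = Bytes.toBytes("example_table");
        byte[] family = Bytes.toBytes("cf");

        // Attach the identifiers to the context; HStore.createFileContext and
        // HFileOutputFormat2 do the equivalent wiring in the patch above.
        HFileContext context = new HFileContextBuilder()
            .withColumnFamily(family)
            .withTableName(tableName)
            .build();

        // With this patch, toString() appends tableName= and columnFamily= when they
        // are set, which is what surfaces in HFileWriterImpl's error message.
        System.out.println(context);
      }
    }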