From 2021c155868524b1526f0902347f838dd4182c10 Mon Sep 17 00:00:00 2001 From: Sergey Soldatov Date: Tue, 13 Feb 2018 22:08:11 -0800 Subject: HBASE-19863 java.lang.IllegalStateException: isDelete failed when SingleColumnValueFilter is used --- .../hadoop/hbase/regionserver/StoreScanner.java | 3 + .../apache/hadoop/hbase/HBaseTestingUtility.java | 28 +++++-- .../org/apache/hadoop/hbase/client/TestAdmin1.java | 92 +++++++++++++++++++++- 3 files changed, 117 insertions(+), 6 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 0b9b547..cff9c51 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -816,6 +816,9 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner return false; } } while ((nextCell = this.heap.peek()) != null && CellUtil.matchingRowColumn(cell, nextCell)); + if (nextCell != null && matcher.compareKeyForNextColumn(nextCell, cell) < 0) { + return false; + } return true; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 2bdfd2d..0ec78c8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -1387,14 +1387,32 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { */ public Table createTable(TableDescriptor htd, byte[][] families, byte[][] splitKeys, Configuration c) throws IOException { + // Disable blooms (they are on by default as of 0.95) but we disable them here because + // tests have hard coded counts of what to expect in block cache, etc., and blooms being + // on is interfering. 
+ return createTable(htd, families, splitKeys, BloomType.NONE, HConstants.DEFAULT_BLOCKSIZE, c); + } + + /** + * Create a table. + * @param htd table descriptor to base the new table on + * @param families column families to create + * @param splitKeys region split keys, or null for a single region + * @param type Bloom type + * @param blockSize block size + * @param c Configuration to use + * @return A Table instance for the created table. + * @throws IOException if the table cannot be created + */ + + public Table createTable(TableDescriptor htd, byte[][] families, byte[][] splitKeys, BloomType type, int blockSize, + Configuration c) throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(htd); for (byte[] family : families) { - // Disable blooms (they are on by default as of 0.95) but we disable them here because - // tests have hard coded counts of what to expect in block cache, etc., and blooms being - // on is interfering. builder.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family) - .setBloomFilterType(BloomType.NONE) - .build()); + .setBloomFilterType(type) + .setBlocksize(blockSize) + .build()); } TableDescriptor td = builder.build(); getAdmin().createTable(td, splitKeys); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index c48d130..b47241a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -31,6 +31,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -45,7 +47,11 @@ import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; 
import org.apache.hadoop.hbase.exceptions.MergeRegionException; +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.master.LoadBalancer; +import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStoreFile; @@ -236,7 +242,91 @@ public class TestAdmin1 { } } - @Test + @Test (timeout=300000) + public void testIsDeleteFailure() throws Exception { + final TableName table = TableName.valueOf(name.getMethodName()); + final byte [] family = Bytes.toBytes("0"); + final byte [] c1 = Bytes.toBytes("C01"); + final byte [] c2 = Bytes.toBytes("C02"); + final byte [] c3 = Bytes.toBytes("C03"); + final byte [] c4 = Bytes.toBytes("C04"); + final byte [] c5 = Bytes.toBytes("C05"); + final byte [] c6 = Bytes.toBytes("C06"); + final byte [] c7 = Bytes.toBytes("C07"); + final byte [] c8 = Bytes.toBytes("C08"); + final byte [] c9 = Bytes.toBytes("C09"); + final byte [] c10 = Bytes.toBytes("C10"); + final byte [] c11 = Bytes.toBytes("C11"); + final byte [] c12 = Bytes.toBytes("C12"); + final byte [] c13 = Bytes.toBytes("C13"); + final byte [] c14 = Bytes.toBytes("C14"); + final byte [] c15 = Bytes.toBytes("C15"); + + final byte [] val = Bytes.toBytes("foo"); + List fams = new ArrayList<>(); + fams.add(family); + Table ht = TEST_UTIL.createTable(new HTableDescriptor(table), fams.toArray(new byte[0][]), null, + BloomType.ROWCOL, 10000, new Configuration(TEST_UTIL.getConfiguration())); + List pending = new ArrayList(); + for (int i = 0; i < 1000; i++) { + byte [] row = Bytes.toBytes("key" + Integer.toString(i)); + Put put = new Put(row); + put.addColumn(family,c3,val); + put.addColumn(family,c4,val); + put.addColumn(family,c5,val); + put.addColumn(family,c6,val); + put.addColumn(family,c7,val); 
+ put.addColumn(family,c8,val); + put.addColumn(family,c12,val); + put.addColumn(family,c13,val); + put.addColumn(family,c15,val); + pending.add(put); + Delete del = new Delete(row); + del.addColumns(family,c2); + del.addColumns(family,c9); + del.addColumns(family,c10); + del.addColumns(family,c14); + pending.add(del); + } + ht.batch(pending, new Object[pending.size()]); + TEST_UTIL.flush(); + TEST_UTIL.compact(true); + for (int i = 20; i < 25; i++) { + byte [] row = Bytes.toBytes("key" + Integer.toString(i)); + Put put = new Put(row); + put.addColumn(family,c3,val); + put.addColumn(family,c4,val); + put.addColumn(family,c5,val); + put.addColumn(family,c6,val); + put.addColumn(family,c7,val); + put.addColumn(family,c8,val); + put.addColumn(family,c12,val); + put.addColumn(family,c13,val); + put.addColumn(family,c15,val); + pending.add(put); + Delete del = new Delete(row); + del.addColumns(family,c2); + del.addColumns(family,c9); + del.addColumns(family,c10); + del.addColumns(family,c14); + pending.add(del); + } + ht.batch(pending, new Object[pending.size()]); + TEST_UTIL.flush(); + + Scan scan = new Scan(); + scan.addColumn(family,c9); + scan.addColumn(family,c15); + SingleColumnValueFilter filter = new SingleColumnValueFilter(family,c15, CompareFilter.CompareOp.EQUAL, + new BinaryComparator(c15)); + scan.setFilter(filter); + ResultScanner scanner = ht.getScanner(scan); + // Trigger the scan for a non-existing row, so it will scan over all rows + for(Result result : scanner) { + } + } + + @Test (timeout=300000) public void testDisableAndEnableTable() throws IOException { final byte [] row = Bytes.toBytes("row"); final byte [] qualifier = Bytes.toBytes("qualifier"); -- 2.5.4 (Apple Git-61)