From bbd00ccd3e09c1482a3dddcd11ea6ddfd48a1838 Mon Sep 17 00:00:00 2001 From: stack Date: Wed, 18 Mar 2015 21:18:03 -0700 Subject: [PATCH] HBASE-13187 Add ITBLL that exercises per CF flush --- .../hbase/test/IntegrationTestBigLinkedList.java | 44 ++++++++++++++++++++-- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index 9864031..e47f9d6 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.test; import java.io.DataInput; import java.io.DataOutput; -import java.io.IOException; import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; import java.util.Arrays; @@ -100,16 +100,16 @@ import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; +import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.FileSplit; import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat; -import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; -import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; +import 
org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; @@ -189,6 +189,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { protected static String DEFAULT_TABLE_NAME = "IntegrationTestBigLinkedList"; protected static byte[] FAMILY_NAME = Bytes.toBytes("meta"); + private static byte [] BIG_FAMILY_NAME = Bytes.toBytes("big"); + private static byte [] TINY_FAMILY_NAME = Bytes.toBytes("tiny"); //link to the id of the prev node in the linked list protected static final byte[] COLUMN_PREV = Bytes.toBytes("prev"); @@ -236,6 +238,18 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { static class Generator extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(Generator.class); + /** + * Set this configuration if you want to test that single-column family flush works. + * If set, we will add a big column family and a small column family on either side of the + * usual ITBLL 'meta' column family. When we write out the ITBLL, we will also add to the big + * column family a value bigger than that for ITBLL and for small, something way smaller. + * The idea is that when flush-by-column family rather than by + * region is enabled, we can see if ITBLL is broken in any way. 
+ * Here is how you would pass it: + * $ ./bin/hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList -Dgenerator.multiple.columnfamilies=true generator 1 10 g + */ + public static final String MULTIPLE_UNEVEN_COLUMNFAMILIES_KEY = + "generator.multiple.columnfamilies"; static class GeneratorInputFormat extends InputFormat { static class GeneratorInputSplit extends InputSplit implements Writable { @@ -363,6 +377,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { long numNodes; long wrap; int width; + boolean multipleUnevenColumnFamilies; + byte [] tinyValue = new byte [] {'t', 'i', 'n', 'y'}; + byte [] bigValue = null; @Override protected void setup(Context context) throws IOException, InterruptedException { @@ -378,6 +395,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { if (this.numNodes < this.wrap) { this.wrap = this.numNodes; } + this.multipleUnevenColumnFamilies = + context.getConfiguration().getBoolean(MULTIPLE_UNEVEN_COLUMNFAMILIES_KEY, false); } protected void instantiateHTable() throws IOException { @@ -442,6 +461,17 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { if (id != null) { put.add(FAMILY_NAME, COLUMN_CLIENT, id); } + // See if we are to write multiple columns. + if (this.multipleUnevenColumnFamilies) { + // Use any column name. + put.addColumn(TINY_FAMILY_NAME, this.tinyValue, this.tinyValue); + // If we've not allocated bigValue, do it now. Reuse same value each time. + if (this.bigValue == null) { + this.bigValue = new byte [current.length * 10]; + } + // Use any column name. Here we use column client. 
+ put.addColumn(BIG_FAMILY_NAME, COLUMN_CLIENT, this.bigValue); + } mutator.mutate(put); if (i % 1000 == 0) { @@ -474,12 +504,18 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { protected void createSchema() throws IOException { Configuration conf = getConf(); + boolean multipleUnevenColumnFamilies = + conf.getBoolean(MULTIPLE_UNEVEN_COLUMNFAMILIES_KEY, false); Admin admin = new HBaseAdmin(conf); TableName tableName = getTableName(conf); try { if (!admin.tableExists(tableName)) { HTableDescriptor htd = new HTableDescriptor(getTableName(getConf())); htd.addFamily(new HColumnDescriptor(FAMILY_NAME)); + if (multipleUnevenColumnFamilies) { + htd.addFamily(new HColumnDescriptor(BIG_FAMILY_NAME)); + htd.addFamily(new HColumnDescriptor(TINY_FAMILY_NAME)); + } int numberOfServers = admin.getClusterStatus().getServers().size(); if (numberOfServers == 0) { throw new IllegalStateException("No live regionservers"); @@ -1531,4 +1567,4 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { int ret = ToolRunner.run(conf, new IntegrationTestBigLinkedList(), args); System.exit(ret); } -} +} \ No newline at end of file -- 2.2.1