diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 3824294..b89ff5e 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -3233,6 +3233,25 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { return totalNumberOfRegions; } + public static void createPreSplitLoadTestTable(Configuration conf, HTableDescriptor desc, + HColumnDescriptor hcd, int totalNumberOfRegions) throws IOException { + if (!desc.hasFamily(hcd.getName())) { + desc.addFamily(hcd); + } + HBaseAdmin admin = new HBaseAdmin(conf); + byte[][] splits = new RegionSplitter.HexStringSplit().split(totalNumberOfRegions); + try { + admin.createTable(desc, splits); + } catch (MasterNotRunningException e) { + LOG.error("Master not running", e); + throw new IOException(e); + } catch (TableExistsException e) { + LOG.warn("Table " + desc.getTableName() + " already exists, continuing"); + } finally { + admin.close(); + } + } + public static int getMetaRSPort(Configuration conf) throws IOException { HTable table = new HTable(conf, TableName.META_TABLE_NAME); HRegionLocation hloc = table.getRegionLocation(Bytes.toBytes("")); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index e9e2310..6caa18f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.filter.WhileMatchFilter; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; +import 
org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.trace.SpanReceiverHost; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Hash; @@ -80,6 +81,7 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.util.bloom.BloomFilter; import org.codehaus.jackson.map.ObjectMapper; import com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -292,6 +294,7 @@ public class PerformanceEvaluation extends Configured implements Tool { HColumnDescriptor family = new HColumnDescriptor(FAMILY_NAME); family.setDataBlockEncoding(opts.blockEncoding); family.setCompressionType(opts.compression); + family.setBloomFilterType(opts.bloomType); if (opts.inMemoryCF) { family.setInMemory(true); } @@ -509,6 +512,7 @@ public class PerformanceEvaluation extends Configured implements Tool { this.compression = that.compression; this.blockEncoding = that.blockEncoding; this.filterAll = that.filterAll; + this.bloomType = that.bloomType; } public boolean nomapred = false; @@ -531,6 +535,7 @@ public class PerformanceEvaluation extends Configured implements Tool { public boolean inMemoryCF = false; public int presplitRegions = 0; public Compression.Algorithm compression = Compression.Algorithm.NONE; + public BloomType bloomType = BloomType.NONE; public DataBlockEncoding blockEncoding = DataBlockEncoding.NONE; } @@ -1166,6 +1171,7 @@ public class PerformanceEvaluation extends Configured implements Tool { + " there by not returning any thing back to the client. Helps to check the server side" + " performance. Uses FilterAllFilter internally. "); System.err.println(" latency Set to report operation latencies. 
Default: False"); + System.err.println(" bloomFilter Bloom filter type, one of " + Arrays.toString(BloomType.values())); System.err.println(); System.err.println(" Note: -D properties will be applied to the conf used. "); System.err.println(" For example: "); @@ -1289,7 +1295,7 @@ public class PerformanceEvaluation extends Configured implements Tool { opts.presplitRegions = Integer.parseInt(cmd.substring(presplit.length())); continue; } - + final String inMemory = "--inmemory="; if (cmd.startsWith(inMemory)) { opts.inMemoryCF = Boolean.parseBoolean(cmd.substring(inMemory.length())); @@ -1332,6 +1338,12 @@ public class PerformanceEvaluation extends Configured implements Tool { continue; } + final String bloomFilter = "--bloomFilter="; + if (cmd.startsWith(bloomFilter)) { + opts.bloomType = BloomType.valueOf(cmd.substring(bloomFilter.length())); + continue; + } + Class cmdClass = determineCommandClass(cmd); if (cmdClass != null) { opts.numClientThreads = getNumClients(i + 1, args); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index 4d17556..8d6bbdb 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -31,6 +31,7 @@ import java.util.concurrent.atomic.AtomicReference; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.cli.CommandLine; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -41,6 +42,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import 
org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Cipher; @@ -112,6 +114,9 @@ public class LoadTestTool extends AbstractHBaseTool { private static final String OPT_BLOOM = "bloom"; private static final String OPT_COMPRESSION = "compression"; + private static final String OPT_DEFERRED_LOG_FLUSH = "deferredlogflush"; + public static final String OPT_DEFERRED_LOG_FLUSH_USAGE = "Enable deferred log flush."; + protected static final String OPT_SPLITS = "splits"; public static final String OPT_DATA_BLOCK_ENCODING = HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase(); @@ -157,12 +162,14 @@ public class LoadTestTool extends AbstractHBaseTool { protected long startKey, endKey; protected boolean isWrite, isRead, isUpdate; + protected boolean deferredLogFlush; // Column family options protected DataBlockEncoding dataBlockEncodingAlgo; protected Compression.Algorithm compressAlgo; protected BloomType bloomType; private boolean inMemoryCF; + private int splits = -1; private User userOwner; // Writer options @@ -260,6 +267,7 @@ public class LoadTestTool extends AbstractHBaseTool { } LOG.info("Enabling table " + tableName); admin.enableTable(tableName); + admin.close(); } @Override @@ -306,6 +314,8 @@ public class LoadTestTool extends AbstractHBaseTool { + "tool will create the test table with n regions per server"); addOptWithArg(OPT_ENCRYPTION, OPT_ENCRYPTION_USAGE); + addOptWithArg(OPT_SPLITS, "Number of splits for table, default 5 * number of region servers."); + addOptNoArg(OPT_DEFERRED_LOG_FLUSH, OPT_DEFERRED_LOG_FLUSH_USAGE); } @Override @@ -319,6 +329,7 @@ public class LoadTestTool extends AbstractHBaseTool { isRead = cmd.hasOption(OPT_READ); isUpdate = cmd.hasOption(OPT_UPDATE); isInitOnly = cmd.hasOption(OPT_INIT_ONLY); + deferredLogFlush = cmd.hasOption(OPT_DEFERRED_LOG_FLUSH); if (!isWrite && !isRead && !isUpdate && !isInitOnly) { throw new IllegalArgumentException("Either -" + OPT_WRITE + " or " + @@ -344,6 +355,14 @@ public 
class LoadTestTool extends AbstractHBaseTool { System.out.println("Key range: [" + startKey + ".." + (endKey - 1) + "]"); } + if (cmd.hasOption(OPT_SPLITS)) { + String splitVal = cmd.getOptionValue(OPT_SPLITS); + LOG.info("split value : " + splitVal); + if (StringUtils.isNotEmpty(splitVal)) { + splits = Integer.parseInt(splitVal); + } + } + parseColumnFamilyOptions(cmd); if (isWrite) { @@ -441,11 +460,22 @@ public class LoadTestTool extends AbstractHBaseTool { if (cmd.hasOption(OPT_ENCRYPTION)) { cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION)); } + } public void initTestTable() throws IOException { - HBaseTestingUtility.createPreSplitLoadTestTable(conf, tableName, - COLUMN_FAMILY, compressAlgo, dataBlockEncodingAlgo); + HTableDescriptor desc = new HTableDescriptor(tableName); + if (deferredLogFlush) { + desc.setDurability(Durability.ASYNC_WAL); + } + HColumnDescriptor hcd = new HColumnDescriptor(COLUMN_FAMILY); + hcd.setDataBlockEncoding(dataBlockEncodingAlgo); + hcd.setCompressionType(compressAlgo); + if (splits != -1) { + HBaseTestingUtility.createPreSplitLoadTestTable(conf, desc, hcd, splits); + } else { + HBaseTestingUtility.createPreSplitLoadTestTable(conf, desc, hcd); + } applyColumnFamilyOptions(tableName, COLUMN_FAMILIES); }