From 1388520505b6f954d61946a7a26f70b940eca49a Mon Sep 17 00:00:00 2001
From: manukranthk
Date: Thu, 23 Oct 2014 17:35:42 -0700
Subject: [PATCH] [INTERNAL] HBASE-12333 Add Integration Test Runner which is
 more friendly (Not committed to OS yet)

Summary: The patch is pending review on the open source. Committing
internally for the time being. Will commit to open source as soon as I get a
reply.

Test Plan: Tested on hbaseci001

Reviewers: elliott, daviddeng

Subscribers: bisho, hbase-eng@

Differential Revision: https://phabricator.fb.com/D1755970

Tasks: 5815242

Conflicts:
	hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
---
 .../hadoop/hbase/IntegrationTestsDriver.java       | 120 ++++++++++++++++++++-
 .../hbase/test/IntegrationTestBigLinkedList.java   |  29 +++--
 ...IntegrationTestBigLinkedListWithVisibility.java |  11 +-
 3 files changed, 146 insertions(+), 14 deletions(-)

diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
index 47ce9e1..1a2992b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
@@ -18,7 +18,12 @@
 package org.apache.hadoop.hbase;
 
+import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -26,11 +31,15 @@
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.util.ToolRunner;
+import org.codehaus.jettison.json.JSONWriter;
 import org.junit.internal.TextListener;
 import org.junit.runner.JUnitCore;
 import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
 
 /**
  * This class drives the Integration test suite execution. Executes all
@@ -40,6 +49,10 @@ import org.junit.runner.Result;
 public class IntegrationTestsDriver extends AbstractHBaseTool {
   private static final String SHORT_REGEX_ARG = "r";
   private static final String LONG_REGEX_ARG = "regex";
+  private static final String JOB_NAME_KEY = "hbase.it.job.name";
+  private static final String IT_RESULTS_DIR_KEY = "hbase.it.results.dir";
+  private String confFile = null;
+  private boolean json = false;
   private static final Log LOG = LogFactory.getLog(IntegrationTestsDriver.class);
   private IntegrationTestFilter intTestFilter = new IntegrationTestFilter();
 
@@ -74,28 +87,118 @@ public class IntegrationTestsDriver extends AbstractHBaseTool {
       "Java regex to use selecting tests to run: e.g. .*TestBig.*" +
       " will select all tests that include TestBig in their name. Default: " +
       ".*IntegrationTest.*");
+    addOptWithArg("D",
+      "Configuration parameter." +
+      " Note that some configuration parameters might not make it to the task trackers");
+    addOptWithArg("c", "conf",
+      "Configuration file that should be used for this test runner. " +
+      "Any parameters overridden by " +
+      "command line will take effect");
+    addOptNoArg("json", "Write results as JSON output and store it in DFS");
   }
 
   @Override
   protected void processOptions(CommandLine cmd) {
     String testFilterString = cmd.getOptionValue(SHORT_REGEX_ARG, null);
+    LOG.debug("The regex used to select test classes is: " + testFilterString);
     if (testFilterString != null) {
       intTestFilter.setPattern(testFilterString);
     }
+    if (cmd.hasOption("D")) {
+      String[] confProperties = cmd.getOptionValues("D");
+      for (String property : confProperties) {
+        try {
+          String[] parts = property.split("=");
+          if (parts.length != 2) throw new IllegalArgumentException(
+              "Expected format of conf properties is key=value, but got: " + property);
+          conf.set(parts[0], parts[1]);
+        } catch (Exception e) {
+          LOG.error("Unsupported conf parameter passed: " + property, e);
+        }
+      }
+    }
+    if (cmd.hasOption("c")) {
+      this.confFile = cmd.getOptionValue("c");
+    }
+    if (cmd.hasOption("json")) {
+      this.json = true;
+    }
   }
 
   /**
    * Returns test classes annotated with @Category(IntegrationTests.class),
    * according to the filter specified on the command line (if any).
    */
-  private Class<?>[] findIntegrationTestClasses()
+  protected Class<?>[] findIntegrationTestClasses()
     throws ClassNotFoundException, LinkageError, IOException {
     ClassTestFinder.TestFileNameFilter nameFilter = new ClassTestFinder.TestFileNameFilter();
     ClassFinder classFinder = new ClassFinder(nameFilter, nameFilter, intTestFilter);
     Set<Class<?>> classes = classFinder.findClasses(true);
+    LOG.debug("Integration test classes that will be executed:");
+    for (Class<?> klass : classes) {
+      LOG.debug("Class : " + klass.getCanonicalName());
+    }
     return classes.toArray(new Class<?>[classes.size()]);
   }
 
+  private String getFailuresString(Result result) {
+    StringBuilder sb = new StringBuilder();
+    for (Failure f : result.getFailures()) {
+      sb.append("Message : ");
+      sb.append(f.getMessage());
+      sb.append(" Stack Trace : ");
+      sb.append(f.getTrace());
+    }
+    return sb.toString();
+  }
+
+  protected void writeJSON(List<Result> results) throws Exception {
+    // In case we provide a configuration file on the command line, it will be applied.
+    if (confFile != null) {
+      conf.addResource(confFile);
+    }
+    // This is called from the command line, so we should use the distributed cluster.
+    IntegrationTestingUtility.setUseDistributedCluster(conf);
+    String jobName = conf.get(JOB_NAME_KEY, "job");
+    String resultsDir = conf.get(IT_RESULTS_DIR_KEY, "/tmp/");
+
+    String jsonFile = "/tmp/" + jobName + ".json";
+    FileWriter jsonFileWriter = new FileWriter(jsonFile, true);
+
+    JSONWriter jsonWriter = new JSONWriter(jsonFileWriter);
+    jsonWriter.object(); // {
+
+    LOG.info("Found " + results.size() + " integration tests");
+    FileSystem fs = FileSystem.get(conf);
+    for (Result result : results) {
+      LOG.info("Unit test : " + result.getClass());
+      JUnitCore junit = new JUnitCore();
+      String outFile = "/tmp/" + result.getClass().getName() + ".out";
+      junit.addListener(new TextListener(new PrintStream(new File(outFile))));
+
+      Path outFileOnDFS = new Path(new Path(resultsDir, jobName),
+          result.getClass().getName() + ".out");
+
+      fs.copyFromLocalFile(true, new Path(outFile), outFileOnDFS);
+
+      jsonWriter.key(result.getClass().getName()); // className:
+      jsonWriter.object(); // {
+      jsonWriter.key("outfile"); // outfile:
+      jsonWriter.value(outFileOnDFS.getName()); // outFileOnDFS.getName()
+      jsonWriter.key("success"); // success:
+      jsonWriter.value(result.wasSuccessful()); // result.wasSuccessful()
+      jsonWriter.key("failureCnt"); // failureCnt:
+      jsonWriter.value(result.getFailureCount()); // result.getFailureCount()
+      jsonWriter.key("runTime"); // runTime:
+      jsonWriter.value(result.getRunTime()); // result.getRunTime()
+      jsonWriter.key("failures"); // failures:
+      jsonWriter.value(getFailuresString(result)); // getFailuresString(result)
+      jsonWriter.endObject(); // }
+    }
+    jsonWriter.endObject(); // }
+    jsonFileWriter.close();
+    fs.copyFromLocalFile(true, new Path(jsonFile),
+        new Path(new Path(resultsDir, jobName), jobName + ".json"));
+  }
 
   @Override
   protected int doWork() throws Exception {
@@ -108,8 +211,19 @@ public class IntegrationTestsDriver extends AbstractHBaseTool {
     }
     JUnitCore junit = new JUnitCore();
     junit.addListener(new TextListener(System.out));
-    Result result = junit.run(classes);
+
+    boolean successful = true;
+    List<Result> results = new ArrayList<Result>();
+    for (Class<?> klass : classes) {
+      Result result = junit.run(klass);
+      results.add(result);
+      successful = result.wasSuccessful() && successful;
+    }
+
+    if (json) {
+      writeJSON(results);
+    }
-    return result.wasSuccessful() ? 0 : 1;
+    return successful ? 0 : 1;
   }
 }
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index c7dae78..f0e8083 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -182,18 +182,27 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
   protected static final byte[] COLUMN_COUNT = Bytes.toBytes("count");
 
   /** How many rows to write per map task.
    * This has to be a multiple of 25M */
-  private static final String GENERATOR_NUM_ROWS_PER_MAP_KEY
+  protected static final String GENERATOR_NUM_ROWS_PER_MAP_KEY
     = "IntegrationTestBigLinkedList.generator.num_rows";
 
-  private static final String GENERATOR_NUM_MAPPERS_KEY
+  protected static final String GENERATOR_NUM_MAPPERS_KEY
     = "IntegrationTestBigLinkedList.generator.map.tasks";
+
+  protected static final String VERIFY_NUM_REDUCERS_KEY =
+      "IntegrationTestBigLinkedList.verify.reducer.tasks";
 
-  private static final String GENERATOR_WIDTH_KEY
+  protected static final String GENERATOR_WIDTH_KEY
     = "IntegrationTestBigLinkedList.generator.width";
 
-  private static final String GENERATOR_WRAP_KEY
+  protected static final String GENERATOR_WRAP_KEY
     = "IntegrationTestBigLinkedList.generator.wrap";
 
+  protected static final String LOOP_NUM_ITERATIONS_KEY =
+      "IntegrationTestBigLinkedList.generator.map.tasks";
+
+  protected static final String ITBLL_OUTPUT_FOLDER_KEY =
+      "IntegrationTestBigLinkedList.output.folder";
+
+  protected int NUM_SLAVES_BASE = 3; // number of slaves for the cluster
 
   private static final int MISSING_ROWS_TO_LOG = 50;
 
@@ -1112,10 +1121,14 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
 
   @Test
   public void testContinuousIngest() throws IOException, Exception {
-    //Loop
+    // Loop
     int ret = ToolRunner.run(getTestingUtil(getConf()).getConfiguration(), new Loop(),
-        new String[] {"1", "1", "2000000",
-            util.getDataTestDirOnTestFS("IntegrationTestBigLinkedList").toString(), "1"});
+        new String[] {conf.getInt(LOOP_NUM_ITERATIONS_KEY, 1) + "",
+            conf.getInt(GENERATOR_NUM_MAPPERS_KEY, 1) + "",
+            conf.getInt(GENERATOR_NUM_ROWS_PER_MAP_KEY, 200000000) + "",
+            conf.get(ITBLL_OUTPUT_FOLDER_KEY,
+                util.getDataTestDirOnTestFS("IntegrationTestBigLinkedList").toString()),
+            conf.getInt(VERIFY_NUM_REDUCERS_KEY, 1) + ""});
     org.junit.Assert.assertEquals(0, ret);
   }
 
@@ -1217,4 +1230,4 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     int ret = ToolRunner.run(conf, new IntegrationTestBigLinkedList(), args);
     System.exit(ret);
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index dc517a5..666a80b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -638,9 +638,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
     int ret = ToolRunner.run(
         getTestingUtil(getConf()).getConfiguration(),
         new VisibilityLoop(),
-        new String[] { "1", "1", "20000",
-            util.getDataTestDirOnTestFS("IntegrationTestBigLinkedListWithVisibility").toString(),
-            "1", "10000" });
+        new String[] {
+            conf.getInt(LOOP_NUM_ITERATIONS_KEY, 1) + "",
+            conf.getInt(GENERATOR_NUM_MAPPERS_KEY, 1) + "",
+            conf.getInt(GENERATOR_NUM_ROWS_PER_MAP_KEY, 20000) + "",
+            conf.get(ITBLL_OUTPUT_FOLDER_KEY,
+                util.getDataTestDirOnTestFS("IntegrationTestBigLinkedListWithVisibility").toString()),
+            conf.getInt(VERIFY_NUM_REDUCERS_KEY, 1) + "",
+            conf.getInt(GENERATOR_WIDTH_KEY, 10000) + ""});
     org.junit.Assert.assertEquals(0, ret);
   }
-- 
1.9.5
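
Usage sketch (not part of the committed diff): the driver is typically launched
on a cluster as "hbase org.apache.hadoop.hbase.IntegrationTestsDriver <options>".
The Java below shows one way to make the equivalent programmatic call through
ToolRunner. The class, the option names (-r, -D, -json) and the configuration
keys come from the patch above; the regex, job name and results directory
values are placeholders, and the snippet assumes the hbase-it test classes
(IntegrationTestsDriver, IntegrationTestingUtility) are on the classpath.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.IntegrationTestingUtility;
    import org.apache.hadoop.hbase.IntegrationTestsDriver;
    import org.apache.hadoop.util.ToolRunner;

    public class IntegrationTestsDriverExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Run against a real (distributed) cluster rather than a mini cluster.
        IntegrationTestingUtility.setUseDistributedCluster(conf);
        int exitCode = ToolRunner.run(conf, new IntegrationTestsDriver(), new String[] {
            // -r: regex that selects @Category(IntegrationTests.class) classes to run.
            "-r", ".*IntegrationTestBigLinkedList.*",
            // -D: key=value pairs set on the driver's configuration.
            // hbase.it.job.name and hbase.it.results.dir are read by writeJSON();
            // the values used here are made up for the example.
            "-D", "hbase.it.job.name=itbll-nightly",
            "-D", "hbase.it.results.dir=/user/hbase/it-results",
            // -json: write a per-result summary to DFS after the run.
            "-json" });
        System.exit(exitCode);
      }
    }

With -json, writeJSON() builds /tmp/<job name>.json locally and copies it to
<hbase.it.results.dir>/<job name>/<job name>.json on the DFS (defaults: job
name "job", results dir "/tmp/"), alongside one .out file per result; each JSON
entry carries outfile, success, failureCnt, runTime and failures fields. The
new IntegrationTestBigLinkedList.* keys in the other two files replace the
previously hard-coded Loop arguments (iterations, mappers, rows per mapper,
output folder, reducers, and width for the visibility variant), with the
defaults shown in the diff.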