diff --git itests/qtest-accumulo/src/test/java/org/apache/hadoop/hive/cli/TestAccumuloCliDriver.java itests/qtest-accumulo/src/test/java/org/apache/hadoop/hive/cli/TestAccumuloCliDriver.java index bf50f16..f2a224d 100644 --- itests/qtest-accumulo/src/test/java/org/apache/hadoop/hive/cli/TestAccumuloCliDriver.java +++ itests/qtest-accumulo/src/test/java/org/apache/hadoop/hive/cli/TestAccumuloCliDriver.java @@ -18,45 +18,69 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.accumulo.AccumuloQTestUtil; +import org.apache.hadoop.hive.accumulo.AccumuloTestSetup; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestAccumuloCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private static AccumuloTestSetup setup; + private File qfile; - static CliAdapter adapter = new CliConfigs.AccumuloCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("accumulo-handler/src/test/queries/positive", + "accumulo-handler/src/test/results/positive", + "itests/qtest/target/qfile-results/accumulo-handler/positive", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.beeline_positive_exclude); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestAccumuloCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + setup = new AccumuloTestSetup(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt = new AccumuloQTestUtil(cliConfig, setup); + qt.init(null); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.shutdown(); + } - public TestAccumuloCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + setup.tearDown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestMiniSparkOnYarnCliDriver.java itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestMiniSparkOnYarnCliDriver.java index e84bfce..1f5dc1d 100644 --- itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestMiniSparkOnYarnCliDriver.java +++ itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestMiniSparkOnYarnCliDriver.java @@ -1,45 +1,68 @@ package org.apache.hadoop.hive.cli; import 
java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestMiniSparkOnYarnCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.SparkOnYarnCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive/spark", + "itests/qtest/target/qfile-results/clientpositive/spark", null, + QTestUtil.MiniClusterType.miniSparkOnYarn, "data/conf/spark/yarn-client", + "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.miniSparkOnYarn_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestMiniSparkOnYarnCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestMiniSparkOnYarnCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkCliDriver.java itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkCliDriver.java index 2c8cbee..4e5c4c3 100644 --- itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkCliDriver.java +++ itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkCliDriver.java @@ -1,45 +1,68 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import 
org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestSparkCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.SparkCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive/spark", + "itests/qtest/target/qfile-results/clientpositive/spark", null, + QTestUtil.MiniClusterType.spark, "data/conf/spark/standalone", "q_test_init.sql", + "q_test_cleanup.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.spark_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestSparkCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestSparkCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkNegativeCliDriver.java itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkNegativeCliDriver.java index 2db83f4..190f121 100644 --- itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkNegativeCliDriver.java +++ itests/qtest-spark/src/test/java/org/apache/hadoop/hive/cli/TestSparkNegativeCliDriver.java @@ -1,45 +1,68 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestSparkNegativeCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.SparkNegativeCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientnegative", + "ql/src/test/results/clientnegative/spark", + "itests/qtest/target/qfile-results/clientnegative/spark", null, + QTestUtil.MiniClusterType.spark, "data/conf/spark/standalone", "q_test_init.sql", + "q_test_cleanup.sql", 
true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.spark_query_negative_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestSparkNegativeCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestSparkNegativeCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runFailingQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/ContribNegativeCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/ContribNegativeCliDriver.java deleted file mode 100644 index 253cda3..0000000 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/ContribNegativeCliDriver.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli; - -import java.io.File; -import java.util.List; - -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestRule; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -public class ContribNegativeCliDriver { - - static CliAdapter adapter = new CliConfigs.ContribNegativeCliConfig().getCliAdapter(); - - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); - } - - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); - - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); - - private String name; - private File qfile; - - public ContribNegativeCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; - } - - @Test - public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); - } - -} diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java deleted file mode 100644 index cb276e6..0000000 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli; - -import java.io.File; -import java.util.List; - -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestRule; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -public class DisabledTestBeeLineDriver { - - static CliAdapter adapter = new CliConfigs.BeeLineConfig().getCliAdapter(); - - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); - } - - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); - - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); - - private String name; - private File qfile; - - public DisabledTestBeeLineDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; - } - - @Test - public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); - } - -} diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DummyCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DummyCliDriver.java deleted file mode 100644 index 965d1dc..0000000 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DummyCliDriver.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli; - -import java.io.File; -import java.util.List; - -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestRule; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -public class DummyCliDriver { - - static CliAdapter adapter = new CliConfigs.DummyConfig().getCliAdapter(); - - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); - } - - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); - - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); - - private String name; - private File qfile; - - public DummyCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; - } - - @Test - public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); - } - -} diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java new file mode 100644 index 0000000..c13071e --- /dev/null +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java @@ -0,0 +1,160 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.cli; + +import static org.junit.Assert.fail; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.apache.hive.service.server.HiveServer2; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@Ignore("will be enabled soon ;)") +@RunWith(Parameterized.class) +public class TestBeeLineDriver { + private static CliConfig cliConfig; + private static boolean overwrite = false; + private static QTestUtil.QTestSetup miniZKCluster = null; + private static HiveServer2 hiveServer2; + + private File qfile; +// private QFileClient qClient; + + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive", "itests/qtest/target/qfile-results/beelinepositive", + null, QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.beeline_positive_exclude); + } + + @BeforeClass + public static void beforeClass() throws Exception { + HiveConf hiveConf = new HiveConf(); + hiveConf.logVars(System.err); + System.err.flush(); + + String testOutputOverwrite = System.getProperty("test.output.overwrite"); + if (testOutputOverwrite != null && "true".equalsIgnoreCase(testOutputOverwrite)) { + overwrite = true; + } + + miniZKCluster = new QTestUtil.QTestSetup(); + miniZKCluster.preTest(hiveConf); + + System.setProperty("hive.zookeeper.quorum", hiveConf.get("hive.zookeeper.quorum")); + System.setProperty("hive.zookeeper.client.port", hiveConf.get("hive.zookeeper.client.port")); + + String disableserver = System.getProperty("test.service.disable.server"); + if (null != disableserver && disableserver.equalsIgnoreCase("true")) { + System.err.println("test.service.disable.server=true - Skipping HiveServer2 initialization!"); + return; + } + + hiveServer2 = new HiveServer2(); + hiveServer2.init(hiveConf); + System.err.println("Starting HiveServer2..."); + hiveServer2.start(); + Thread.sleep(5000); + } + + @AfterClass + public static void afterClass() { + try { + if (hiveServer2 != null) { + System.err.println("Stopping HiveServer2..."); + hiveServer2.stop(); + } + } catch (Throwable t) { + t.printStackTrace(); + } + + if (miniZKCluster != null) { + try { + miniZKCluster.tearDown(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + @Before + public void before() throws Exception { +// qClient = new QFileClient(new HiveConf(), QTestUtil.getHiveRootDir(), +// cliConfig.getQueryDir(), cliConfig.getLogDir(), cliConfig.getResultsDir()) +// .setQFileName(qfile.getName()) +// .setUsername("user") +// .setPassword("password") +// .setJdbcUrl("jdbc:hive2://localhost:10000") +// .setJdbcDriver("org.apache.hive.jdbc.HiveDriver") +// .setTestDataDirectory(QTestUtil.getHiveRootDir() + "/data/files") +// .setTestScriptDirectory(QTestUtil.getHiveRootDir() + "/data/scripts"); + } + + @Parameterized.Parameters(name = "{0}") + public static List getParameters() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); + } + + public TestBeeLineDriver(String name, File qfile) { + this.qfile = qfile; + } + + @Test + public void testCliDriver() throws Exception { +// 
long startTime = System.currentTimeMillis(); +// System.err.println(">>> STARTED " + qfile.getName() + " (Thread " +// + Thread.currentThread().getName() + ")"); +// try { +// qClient.run(); +// } catch (Exception e) { +// System.err.println(">>> FAILED " + qfile.getName() + " with exception:"); +// e.printStackTrace(); +// throw e; +// } +// long elapsedTime = (System.currentTimeMillis() - startTime) / 1000; +// String time = "(" + elapsedTime + "s)"; +// +// if (qClient.compareResults()) { +// System.err.println(">>> PASSED " + qfile.getName() + " " + time); +// } else { +// if (qClient.hasErrors()) { +// System.err.println(">>> FAILED " + qfile.getName() + " (ERROR) " + time); +// fail(); +// } +// if (overwrite) { +// System.err.println(">>> PASSED " + qfile.getName() + " (OVERWRITE) " + time); +// qClient.overwriteResults(); +// } else { +// System.err.println(">>> FAILED " + qfile.getName() + " (DIFF) " + time); +// fail(); +// } +// } + } +} diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCliDriver.java index c4c4f41..252598f 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCliDriver.java @@ -18,45 +18,71 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.CliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive", "itests/qtest/target/qfile-results/clientpositive", + null, QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minillap_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minimr_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minitez_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.encrypted_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.spark_only_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.disabled_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - 
private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCompareCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCompareCliDriver.java index 944cd32..b41af83 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCompareCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestCompareCliDriver.java @@ -18,45 +18,66 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestCompareCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; + + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientcompare", + "ql/src/test/results/clientcompare", "itests/qtest/target/qfile-results/clientcompare", + null, QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", false); + } - static CliAdapter adapter = new CliConfigs.CompareCliConfig().getCliAdapter(); + public TestCompareCliDriver(String name, File qfile) { + this.qfile = qfile; + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - private String name; - private File qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } - public TestCompareCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runVersionedTest(qfile); } } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribCliDriver.java index 54596f9..4a2b09b 100644 --- 
itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribCliDriver.java @@ -18,45 +18,66 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestContribCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.ContribCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("contrib/src/test/queries/clientpositive", + "contrib/src/test/results/clientpositive", + "itests/qtest/target/qfile-results/contribclientpositive", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestContribCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestContribCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribNegativeCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribNegativeCliDriver.java index 1b39ee7..a77afed 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribNegativeCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestContribNegativeCliDriver.java @@ -18,45 +18,66 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import 
org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestContribNegativeCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.ContribNegativeCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("contrib/src/test/queries/clientnegative", + "contrib/src/test/results/clientnegative", + "itests/qtest/target/qfile-results/contribclientnegative", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestContribNegativeCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestContribNegativeCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runFailingQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestEncryptedHDFSCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestEncryptedHDFSCliDriver.java index 8c6807e..032e4f9 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestEncryptedHDFSCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestEncryptedHDFSCliDriver.java @@ -18,45 +18,68 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestEncryptedHDFSCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.EncryptedHDFSCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive/encrypted", + "itests/qtest/target/qfile-results/clientpositive", null, + QTestUtil.MiniClusterType.encrypted, 
"data/conf", "q_test_init_for_encryption.sql", + "q_test_cleanup_for_encryption.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.encrypted_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestEncryptedHDFSCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestEncryptedHDFSCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseCliDriver.java index 7b6f76a..0f8d154 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseCliDriver.java @@ -18,45 +18,70 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.hbase.HBaseQTestUtil; +import org.apache.hadoop.hive.hbase.HBaseTestSetup; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestHBaseCliDriver { + private static CliConfig cliConfig; + private static HBaseQTestUtil qt; + private static HBaseTestSetup setup = new HBaseTestSetup(); + private File qfile; - static CliAdapter adapter = new CliConfigs.HBaseCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("hbase-handler/src/test/queries/positive", + "hbase-handler/src/test/results/positive", + "itests/qtest/target/qfile-results/hbase-handler/positive", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", false); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestHBaseCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new HBaseQTestUtil(cliConfig, setup); + qt.cleanUp(); + 
qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestHBaseCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + setup.tearDown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseMinimrCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseMinimrCliDriver.java index 934af16..93f4926 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseMinimrCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseMinimrCliDriver.java @@ -18,45 +18,75 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.hbase.HBaseQTestUtil; +import org.apache.hadoop.hive.hbase.HBaseTestSetup; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) -public class TestHBaseMinimrCliDriver { +public class TestHBaseMinimrCliDriver { + private static CliConfig cliConfig; + private static HBaseQTestUtil qt; + private static HBaseTestSetup setup = new HBaseTestSetup(); + private File qfile; - static CliAdapter adapter = new CliConfigs.HBaseMinimrCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("hbase-handler/src/test/queries/positive", + "hbase-handler/src/test/results/positive", + "itests/qtest/target/qfile-results/hbase-handler/minimrpositive", null, + QTestUtil.MiniClusterType.mr, "", "q_test_init.sql", "q_test_cleanup.sql", false); - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + // XXX: this seems to have been unintentionally set to run only hbase_bulk.m
+ // includeQuery("hbase_bulk.m"); => would be filtered out because it does not end with .q + // this method was added to keep the existing behaviour + cliConfig.overrideUserQueryFile("hbase_bulk.m"); } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + public TestHBaseMinimrCliDriver(String name, File qfile) { + this.qfile = qfile; + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new HBaseQTestUtil(cliConfig, setup); + qt.cleanUp(); + qt.createSources(); + } - private String name; - private File qfile; + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - public TestHBaseMinimrCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } + + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + setup.tearDown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseNegativeCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseNegativeCliDriver.java index 88d626c..e49d43b 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseNegativeCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestHBaseNegativeCliDriver.java @@ -18,45 +18,70 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.hbase.HBaseQTestUtil; +import org.apache.hadoop.hive.hbase.HBaseTestSetup; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestHBaseNegativeCliDriver { + private static CliConfig cliConfig; + private static HBaseQTestUtil qt; + private static HBaseTestSetup setup = new HBaseTestSetup(); + private File qfile; - static CliAdapter adapter = new CliConfigs.HBaseNegativeCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("hbase-handler/src/test/queries/negative", + "hbase-handler/src/test/results/negative", + "itests/qtest/target/qfile-results/hbase-handler/negative", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", false); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestHBaseNegativeCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new HBaseQTestUtil(cliConfig, setup); + qt.cleanUp(); + qt.createSources(); + } - 
@Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestHBaseNegativeCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + setup.tearDown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runFailingQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapCliDriver.java index ad525fe..41f774b 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapCliDriver.java @@ -18,45 +18,69 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestMiniLlapCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.MiniLlapCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive/llap", + "itests/qtest/target/qfile-results/clientpositive", null, + QTestUtil.MiniClusterType.llap, "data/conf/llap", "q_test_init.sql", + "q_test_cleanup.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minillap_query_files); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minillap_shared_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestMiniLlapCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestMiniLlapCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List 
getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniTezCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniTezCliDriver.java index c23b0b3..79b27a0 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniTezCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniTezCliDriver.java @@ -18,45 +18,71 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestMiniTezCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.MiniTezCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive/tez", + "itests/qtest/target/qfile-results/clientpositive", CliConfig.MetastoreType.hbase, + QTestUtil.MiniClusterType.tez, "data/conf/tez", "q_test_init.sql", "q_test_cleanup.sql", + true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minitez_query_files); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minitez_query_files_shared); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minillap_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minillap_shared_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestMiniTezCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestMiniTezCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMinimrCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMinimrCliDriver.java index 96a9e8f..82aa74a 100644 --- 
itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMinimrCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMinimrCliDriver.java @@ -18,45 +18,66 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestMinimrCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.MinimrCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive", + "ql/src/test/results/clientpositive", "itests/qtest/target/qfile-results/clientpositive", + null, QTestUtil.MiniClusterType.mr, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minimr_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestMinimrCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestMinimrCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeCliDriver.java index 1040228..2334a2d 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeCliDriver.java @@ -18,45 +18,67 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import 
org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestNegativeCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.NegativeCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientnegative", + "ql/src/test/results/clientnegative", "itests/qtest/target/qfile-results/clientnegative", + null, QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minimr_query_negative_files); + cliConfig.excludeQuery("authorization_uri_import.q"); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestNegativeCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestNegativeCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runFailingQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeMinimrCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeMinimrCliDriver.java index f7e2caa..9d43517 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeMinimrCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestNegativeMinimrCliDriver.java @@ -18,45 +18,66 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestNegativeMinimrCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.NegativeMinimrCli().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientnegative", + "ql/src/test/results/clientnegative", 
"itests/qtest/target/qfile-results/clientnegative", + null, QTestUtil.MiniClusterType.mr, "", "q_test_init.sql", "q_test_cleanup.sql", true); + cliConfig.includeQuerySet(CliConfig.TestFileSet.minimr_query_negative_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestNegativeMinimrCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @Before + public void before() throws Exception { + qt.clearTestSideEffects(); + } - private String name; - private File qfile; + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - public TestNegativeMinimrCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } + + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runFailingQuery(qfile); } - -} +} \ No newline at end of file diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestPerfCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestPerfCliDriver.java index 4df4eeb..39577d7 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestPerfCliDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestPerfCliDriver.java @@ -18,45 +18,67 @@ package org.apache.hadoop.hive.cli; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestPerfCliDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.PerfCliConfig().getCliAdapter(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/clientpositive/perf", + "ql/src/test/results/clientpositive/perf", + "itests/qtest/target/qfile-results/clientpositive", null, + QTestUtil.MiniClusterType.tez, "data/conf/perf-reg/", "q_perf_test_init.sql", + "q_perf_test_cleanup.sql", false); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minimr_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.minitez_query_files); + cliConfig.excludeQuerySet(CliConfig.TestFileSet.encrypted_query_files); + } - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + public TestPerfCliDriver(String name, File qfile) { + this.qfile = qfile; } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + @BeforeClass + 
public static void beforeClass() throws Exception { + System.setProperty("datanucleus.schema.autoCreateAll", "true"); + System.setProperty("hive.metastore.schema.verification", "false"); + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + QTestUtil.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf()); + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @After + public void after() throws Exception { + qt.clearPostTestEffects(); + } - private String name; - private File qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.shutdown(); + } - public TestPerfCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runQuery(qfile); } - } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/ql/parse/TestParseNegativeDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/ql/parse/TestParseNegativeDriver.java index 4c1224f..f6d5f87 100644 --- itests/qtest/src/test/java/org/apache/hadoop/hive/ql/parse/TestParseNegativeDriver.java +++ itests/qtest/src/test/java/org/apache/hadoop/hive/ql/parse/TestParseNegativeDriver.java @@ -18,45 +18,55 @@ package org.apache.hadoop.hive.ql.parse; import java.io.File; +import java.io.IOException; import java.util.List; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.junit.ClassRule; -import org.junit.Rule; +import org.apache.hadoop.hive.cli.control.CliConfig; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestParseNegativeDriver { + private static CliConfig cliConfig; + private static QTestUtil qt; + private File qfile; - static CliAdapter adapter = new CliConfigs.ParseNegativeConfig().getCliAdapter(); - - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); + private static void initConfig() throws IOException { + cliConfig = new CliConfig("ql/src/test/queries/negative", + "ql/src/test/results/compiler/errors", "itests/qtest/target/qfile-results/negative", null, + QTestUtil.MiniClusterType.none, "", "q_test_init.sql", "q_test_cleanup.sql", false); } - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); + public TestParseNegativeDriver(String name, File qfile) { + this.qfile = qfile; + } - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); + @BeforeClass + public static void beforeClass() throws Exception { + qt = new QTestUtil(cliConfig); + qt.cleanUp(); + qt.createSources(); + } - private String name; - private File qfile; + @AfterClass + public static void afterClass() throws Exception { + qt.clearPostTestEffects(); + qt.shutdown(); + } - public TestParseNegativeDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; + @Parameterized.Parameters(name = "{0}") + public static List getQueryFiles() throws Exception { + initConfig(); + return cliConfig.getQueryFilesAsParameters(); } @Test public void 
testCliDriver() throws Exception { - adapter.runTest(name, qfile); + qt.runParseTest(qfile); } - } diff --git itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java index 88bc0bc..c9880ca 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java @@ -17,12 +17,18 @@ */ package org.apache.hadoop.hive.accumulo; +import org.apache.hadoop.hive.cli.control.CliConfig; import org.apache.hadoop.hive.ql.QTestUtil; /** * AccumuloQTestUtil initializes Accumulo-specific test fixtures. */ public class AccumuloQTestUtil extends QTestUtil { + public AccumuloQTestUtil(CliConfig cliConfig, AccumuloTestSetup setup) throws Exception { + this(cliConfig.getResultsDir(), cliConfig.getLogDir(), cliConfig.getClusterType(), + setup, cliConfig.getInitScript(), cliConfig.getCleanupScript()); + } + public AccumuloQTestUtil(String outDir, String logDir, MiniClusterType miniMr, AccumuloTestSetup setup, String initScript, String cleanupScript) throws Exception { diff --git itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloTestSetup.java itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloTestSetup.java index 73d5f15..00692e4 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloTestSetup.java +++ itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloTestSetup.java @@ -34,6 +34,7 @@ import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.minicluster.MiniAccumuloCluster; import org.apache.accumulo.minicluster.MiniAccumuloConfig; +import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; @@ -54,6 +55,10 @@ protected void setupWithHiveConf(HiveConf conf) throws Exception { String testTmpDir = System.getProperty("test.tmp.dir"); File tmpDir = new File(testTmpDir, "accumulo"); + if (tmpDir.exists()) { + FileUtils.deleteDirectory(tmpDir); + } + MiniAccumuloConfig cfg = new MiniAccumuloConfig(tmpDir, PASSWORD); cfg.setNumTservers(1); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java deleted file mode 100644 index efbd465..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java +++ /dev/null @@ -1,417 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli.control; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.io.InputStream; -import java.lang.reflect.Constructor; -import java.net.URL; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.regex.Pattern; - -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import com.google.common.base.Splitter; -import com.google.common.collect.Sets; - -public abstract class AbstractCliConfig { - - public static final String HIVE_ROOT = getHiveRoot(); - - public static enum MetastoreType { - sql, hbase - }; - - private MetastoreType metastoreType = MetastoreType.sql; - private String queryFile; - private String queryFileRegex; - private String queryDirectory; - // pending change to boolean - private String runDisabled; - // FIXME: file paths in strings should be changed to either File or Path ... anything but String - private String resultsDirectory; - private Set excludedQueryFileNames = new LinkedHashSet<>(); - private String hadoopVersion; - private String logDirectory; - // these should have viable defaults - private String cleanupScript; - private String initScript; - private String hiveConfDir; - private MiniClusterType clusterType; - - // FIXME: null value is treated differently on the other end..when those filter will be - // moved...this may change - private Set includeQueryFileNames; - private Class cliAdapter; - - public AbstractCliConfig(Class adapter) { - cliAdapter=adapter; - clusterType = MiniClusterType.none; - queryFile = getSysPropValue("qfile"); - queryFileRegex = getSysPropValue("qfile_regex"); - runDisabled = getSysPropValue("run_disabled"); - } - - private static String getHiveRoot() { - List candidateSiblings = new ArrayList<>(); - if (System.getProperty("hive.root") != null) { - try { - candidateSiblings.add(new File(System.getProperty("hive.root")).getCanonicalPath()); - } catch (IOException e) { - throw new RuntimeException("error getting hive.root",e); - } - } - candidateSiblings.add(new File(".").getAbsolutePath()); - - for (String string : candidateSiblings) { - File curr = new File(string); - do { - Set lls = Sets.newHashSet(curr.list()); - if (lls.contains("itests") && lls.contains("ql") && lls.contains("metastore")) { - System.out.println("detected hiveRoot: " + curr); - return QTestUtil.ensurePathEndsInSlash(curr.getAbsolutePath()); - } - curr = curr.getParentFile(); - } while (curr != null); - } - throw new RuntimeException("unable to find hiveRoot"); - } - - protected void setQueryDir(String dir) { - queryDirectory = getAbsolutePath(dir); - } - - @Deprecated - public void overrideUserQueryFile(String q) { - queryFile = q; - } - - public void includesFrom(URL resource, String key) { - try (InputStream is = resource.openStream()) { - Properties props = new Properties(); - props.load(is); - String fileNames = getSysPropValue(key); - if (fileNames == null) { - fileNames = props.getProperty(key); - } - if (fileNames != null) { - for (String qFile : TEST_SPLITTER.split(fileNames)) { - includeQuery(qFile); - } - } - } catch (IOException e) { - throw new RuntimeException("error processing:" + resource, e); - } - } - - protected void includeQuery(String qFile) { - if (includeQueryFileNames == null) { - 
includeQueryFileNames = new HashSet<>(); - } - includeQueryFileNames.add(qFile); - } - - public void excludesFrom(URL resource, String key) { - try (InputStream is = resource.openStream()) { - Properties props = new Properties(); - props.load(is); - - String fileNames = getSysPropValue(key); - if (fileNames == null) { - fileNames = props.getProperty(key); - } - if (fileNames != null) { - for (String qFile : TEST_SPLITTER.split(fileNames)) { - excludeQuery(qFile); - } - } - } catch (IOException e) { - throw new RuntimeException("error processing:" + resource, e); - } - } - - protected void excludeQuery(String qFile) { - excludedQueryFileNames.add(qFile); - } - - - private static final Splitter TEST_SPLITTER = - Splitter.onPattern("[, ]").trimResults().omitEmptyStrings(); - - public static class IncludeFilter implements FileFilter { - - Set includeOnly; - - public IncludeFilter(Set includeOnly) { - this.includeOnly = includeOnly; - } - - @Override - public boolean accept(File fpath) { - return includeOnly == null || includeOnly.contains(fpath.getName()); - } - } - - public static class QFileFilter extends IncludeFilter { - - public QFileFilter(Set includeOnly) { - super(includeOnly); - } - - @Override - public boolean accept(File fpath) { - if (!super.accept(fpath)) { - return false; - } - if (fpath.isDirectory() || !fpath.getName().endsWith(".q")) { - return false; - } - return true; - } - } - - public static class DisabledQFileFilter extends IncludeFilter { - public DisabledQFileFilter(Set includeOnly) { - super(includeOnly); - } - - @Override - public boolean accept(File fpath) { - if (!super.accept(fpath)) { - return false; - } - return !fpath.isDirectory() && fpath.getName().endsWith(".q.disabled"); - } - } - - public static class QFileRegexFilter implements FileFilter { - Pattern filterPattern; - - public QFileRegexFilter(String filter) { - filterPattern = Pattern.compile(filter); - } - - @Override - public boolean accept(File filePath) { - if (filePath.isDirectory() || !filePath.getName().endsWith(".q")) { - return false; - } - String testName = StringUtils.chomp(filePath.getName(), ".q"); - return filterPattern.matcher(testName).matches(); - } - } - - public Set getQueryFiles() throws Exception { - prepareDirs(); - - Set includeOnly = includeQueryFileNames; - - // queryDirectory should not be null - File queryDir = new File(queryDirectory); - - // dedup file list - Set testFiles = new LinkedHashSet<>(); - if (queryFile != null && !queryFile.equals("")) { - // The user may have passed a list of files - comma separated - for (String qFile : TEST_SPLITTER.split(queryFile)) { - if (null != queryDir) { - testFiles.add(new File(queryDir, qFile)); - } else { - testFiles.add(new File(qFile)); - } - } - } else if (queryFileRegex != null && !queryFileRegex.equals("")) { - for (String regex : TEST_SPLITTER.split(queryFileRegex)) { - testFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileRegexFilter(regex)))); - } - } else if (runDisabled != null && runDisabled.equals("true")) { - testFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter(includeOnly)))); - } else { - testFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileFilter(includeOnly)))); - } - - for (String qFileName : excludedQueryFileNames) { - testFiles.remove(new File(queryDir, qFileName)); - } - - return testFiles; - } - - private void prepareDirs() throws Exception { - File hiveRootDir = new File(HIVE_ROOT); - if (!hiveRootDir.exists()) { - throw new RuntimeException( - "Hive Root Directory " + 
hiveRootDir.getCanonicalPath() + " does not exist"); - } - - File logDir = new File(logDirectory); - if (!logDir.exists()) { - FileUtils.forceMkdir(logDir); - } - - File resultsDir = new File(resultsDirectory); - if (!resultsDir.exists()) { - FileUtils.forceMkdir(resultsDir); - } - } - - public String getHadoopVersion() { - if (hadoopVersion == null) { - System.out.println("detecting hadoop.version from loaded libs"); - try { - String hadoopPropsLoc = "/META-INF/maven/org.apache.hadoop/hadoop-hdfs/pom.properties"; - URL hadoopPropsURL = getClass().getResource(hadoopPropsLoc); - if (hadoopPropsURL == null) { - throw new RuntimeException("failed to get hadoop properties: " + hadoopPropsLoc); - } - try (InputStream is = hadoopPropsURL.openStream()) { - Properties props = new Properties(); - props.load(is); - hadoopVersion = props.getProperty("version"); - if (hadoopVersion == null) { - throw new RuntimeException("version property not found"); - } - } catch (IOException e) { - throw new RuntimeException("unable to extract hadoop.version from: " + hadoopPropsURL, e); - } - } catch (Exception e) { - throw new RuntimeException( - "can't get hadoop.version ; specify manually using hadoop.version property!"); - } - } - return hadoopVersion; - } - - protected void setHadoopVersion(String hadoopVersion) { - this.hadoopVersion = hadoopVersion; - } - - public String getLogDir() { - return logDirectory; - } - - protected void setLogDir(String logDirectory) { - this.logDirectory = getAbsolutePath(logDirectory); - } - - public String getResultsDir() { - return resultsDirectory; - } - - protected void setResultsDir(String resultsDir) { - resultsDirectory = getAbsolutePath(resultsDir); - } - - public String getCleanupScript() { - return cleanupScript; - } - - protected void setCleanupScript(String cleanupScript) { - this.cleanupScript = cleanupScript; - } - - public String getInitScript() { - return initScript; - } - - protected void setInitScript(String initScript) { - String initScriptPropValue = getSysPropValue("initScript"); - if (initScriptPropValue != null) { - System.out.println("initScript override(by system property):" + initScriptPropValue); - this.initScript = initScriptPropValue; - } else { - this.initScript = initScript; - } - } - - public String getHiveConfDir() { - return hiveConfDir; - } - - protected void setHiveConfDir(String hiveConfDir) { - if (hiveConfDir.trim().isEmpty()) { - this.hiveConfDir = hiveConfDir; - } else { - this.hiveConfDir = getAbsolutePath(hiveConfDir); - } - } - - public MiniClusterType getClusterType() { - return clusterType; - } - - protected void setClusterType(MiniClusterType type) { - String modeStr = getSysPropValue("clustermode"); - if (modeStr != null) { - // FIXME: this should be changeto valueOf ... 
- // that will also kill that fallback 'none' which is I think more like a problem than a - // feature ;) - clusterType = MiniClusterType.valueForString(modeStr); - } else { - clusterType = type; - } - if (clusterType == null) { - throw new RuntimeException("clustertype cant be null"); - } - } - - private String getSysPropValue(String propName) { - String propValue = System.getProperty(propName); - if (propValue == null || propValue.trim().length() == 0) { - return null; - } - System.out.println("property: " + propName + " used as override with val: " + propValue); - return propValue.trim(); - } - - public CliAdapter getCliAdapter() { - try { - Constructor cz = cliAdapter.getConstructor(AbstractCliConfig.class); - return cz.newInstance(this); - } catch (Exception e) { - throw new RuntimeException("unable to build adapter", e); - } - } - - protected void setMetastoreType(MetastoreType mt) { - metastoreType=mt; - } - - public MetastoreType getMetastoreType() { - return metastoreType; - } - - public String getQueryDirectory() { - return queryDirectory; - } - - private String getAbsolutePath(String dir) { - return new File(new File(HIVE_ROOT), dir).getAbsolutePath(); - } - -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java deleted file mode 100644 index b89d6e7..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli.control; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import org.junit.rules.TestRule; -import org.junit.runner.Description; -import org.junit.runners.model.Statement; - -/** - * This class adapts old vm test-executors to be executed in multiple instances - */ -public abstract class CliAdapter { - - protected final AbstractCliConfig cliConfig; - - public CliAdapter(AbstractCliConfig cliConfig) { - this.cliConfig = cliConfig; - } - - public final List getParameters() throws Exception { - Set f = cliConfig.getQueryFiles(); - List ret = new ArrayList<>(); - - for (File file : f) { - String label = file.getName().replaceAll("\\.[^\\.]+$", ""); - ret.add(new Object[] { label, file }); - } - return ret; - } - - abstract public void beforeClass() throws Exception; - - // HIVE-14444 pending rename: before - abstract public void setUp(); - - // HIVE-14444 pending rename: after - abstract public void tearDown(); - - // HIVE-14444 pending rename: afterClass - abstract public void shutdown() throws Exception; - - abstract public void runTest(String name, String name2, String absolutePath) throws Exception; - - public final TestRule buildClassRule() { - return new TestRule() { - @Override - public Statement apply(final Statement base, Description description) { - return new Statement() { - @Override - public void evaluate() throws Throwable { - CliAdapter.this.beforeClass(); - try { - base.evaluate(); - } finally { - CliAdapter.this.shutdown(); - } - } - }; - } - }; - } - - public final TestRule buildTestRule() { - return new TestRule() { - @Override - public Statement apply(final Statement base, Description description) { - return new Statement() { - @Override - public void evaluate() throws Throwable { - CliAdapter.this.setUp(); - try { - base.evaluate(); - } finally { - CliAdapter.this.tearDown(); - } - } - }; - } - }; - } - - // HIVE-14444: pending refactor to push File forward - public final void runTest(String name, File qfile) throws Exception { - runTest(name, qfile.getName(), qfile.getAbsolutePath()); - } - -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfig.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfig.java new file mode 100644 index 0000000..edcaacf --- /dev/null +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfig.java @@ -0,0 +1,409 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.cli.control; + +import java.io.File; +import java.io.FileFilter; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Properties; +import java.util.Set; +import java.util.regex.Pattern; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import com.google.common.base.Splitter; + +/** + * Stores the test configuration values and computes the tests to run + */ +public class CliConfig { + // Only one of the next 3 will be used, in the following precedence; if none of these are set, + // then every file with a 'q' extension in the queryDirectory will be used + + // The queryFile(s) to run (separated by ',' or space), as provided by commandline + private String queryFile; + // Run queries where the filename matches this regular expression, as provided by commandline + private String queryFileRegex; + // Should the test run disabled queries (filename ends with .q.disabled), as provided by + // commandline + private boolean runDisabled; + + // In which directory should the test look for the queries to run + private File queryDirectory; + // The directory storing the expected output files for the queries + private File resultsDirectory; + // The log directory where the query outputs are collected + private File logDirectory; + // The MetastoreType to use - sql or hbase + private MetastoreType metastoreType; + // The cluster type to run the test against + private MiniClusterType clusterType; + // The hive configuration directory for the cluster to set up to run the test against + private String hiveConfDir; + // The cleanup script to run before every test + private String cleanupScript; + // The init script to run BeforeClass + private String initScript; + // The hadoop version used when calculating the queries to run + private String hadoopVersion; + // Should the LLAP IO be initialized, or not + private boolean withLlapIo; + + // Should every query from the queryDirectory be used, or just specific tests + // If the user set at least one inclusive query list (includeQuerySet, includeQuery), then this + // will be set to true, and only the selected files will be used, not every file from the + // queryDirectory + private boolean includeAll; + // These are the specific files that should be included in the test list - if includeAll is false + private Set includedQueryFileNames; + // The query files which should be excluded from the test list + private Set excludedQueryFileNames; + // The configuration files containing the querySets + private Properties testFilesConf; + + private static final Splitter TEST_SPLITTER = + Splitter.onPattern("[, ]").trimResults().omitEmptyStrings(); + + public CliConfig(String queryDirectory, String resultsDirectory, String logDirectory, + MetastoreType metastoreType, MiniClusterType miniClusterType, String hiveConfDir, + String initScript, String cleanupScript, boolean withLlapIo) throws IOException { + this.queryDirectory = new File(QTestUtil.getHiveRootDir() + getProperty("queryDirectory", + queryDirectory, "./")); + this.resultsDirectory = new File(QTestUtil.getHiveRootDir() + getProperty("resultsDirectory", + resultsDirectory, "./")); + this.logDirectory = new File(QTestUtil.getHiveRootDir() + getProperty("logDirectory", +
logDirectory, "./")); + + String metastoreTypeString = System.getProperty("metastoreType"); + if (metastoreTypeString != null && metastoreTypeString.trim().length() !=0) { + System.err.println("Property: metastoreType used as override with val: " + + metastoreTypeString); + this.metastoreType = MetastoreType.valueOf(metastoreTypeString); + } else if (metastoreType != null) { + this.metastoreType = metastoreType; + } else { + this.metastoreType = MetastoreType.sql; + } + + String clusterTypeString = System.getProperty("clustermode"); + if (clusterTypeString != null && clusterTypeString.trim().length() !=0) { + System.out.println("Property: clustermode used as override with val: " + clusterTypeString); + // FIXME: this should be changed to valueOf ... + // that will also kill that fallback 'none' which is I think more like a problem than a + // feature ;) + this.clusterType = MiniClusterType.valueForString(clusterTypeString); + } else if (miniClusterType != null) { + this.clusterType = miniClusterType; + } else { + this.clusterType = MiniClusterType.none; + } + + this.hiveConfDir = getProperty("hiveConfDir", hiveConfDir, ""); + this.initScript = getProperty("initScript", initScript, ""); + this.cleanupScript = getProperty("cleanupScript", cleanupScript, ""); + this.hadoopVersion = getProperty("hadoop.version", null, + QTestUtil.getHadoopVersionFromJar()); + this.withLlapIo = withLlapIo; + + queryFile = getProperty("qfile"); + queryFileRegex = getProperty("qfile_regex"); + String runDisabledString = getProperty("run_disabled"); + if (runDisabledString==null || !runDisabledString.trim().equals("true")) { + runDisabled = false; + } else { + runDisabled = true; + } + + includeAll = true; + includedQueryFileNames = new HashSet(); + excludedQueryFileNames = new HashSet(); + + FileInputStream is = new FileInputStream(QTestUtil.getHiveRootDir() + + "itests/src/test/resources/testconfiguration.properties"); + testFilesConf = new Properties(); + testFilesConf.load(is); + + prepareDirs(); + } + + public String getQueryDir() { + return queryDirectory.getAbsolutePath(); + } + + public String getResultsDir() { + return resultsDirectory.getAbsolutePath(); + } + + public String getLogDir() { + return logDirectory.getAbsolutePath(); + } + + public String getCleanupScript() { + return cleanupScript; + } + + public String getInitScript() { + return initScript; + } + + public String getHiveConfDir() { + return hiveConfDir; + } + + public MiniClusterType getClusterType() { + return clusterType; + } + + public MetastoreType getMetastoreType() { + return metastoreType; + } + + public String getHadoopVersion() { + return hadoopVersion; + } + + public boolean isWithLlapIo() { + return withLlapIo; + } + + /** + * Should use query files with .q extensions, and use the appropriate exclude and include + * methods to achieve the expected list of queries for the given configuration + * @param q + */ + @Deprecated + public void overrideUserQueryFile(String q) { + queryFile = q; + } + + /** + * Include a whole set of query files + * @param testFileSet The set to include in the list of tests + */ + public void includeQuerySet(TestFileSet testFileSet) { + Iterable fileNames = getFileNames(testFileSet); + if (fileNames != null) { + for (String qFile : fileNames) { + includeQuery(qFile); + } + } + } + + /** + * Include a specific query + * @param qFile The specific query to add to the list of tests + */ + public void includeQuery(String qFile) { + includeAll = false; + includedQueryFileNames.add(qFile); + } + + /** + * Exclude a
whole set of query files + * @param testFileSet The set to exclude from the list of tests + */ + public void excludeQuerySet(TestFileSet testFileSet) { + Iterable fileNames = getFileNames(testFileSet); + if (fileNames != null) { + for (String qFile : fileNames) { + excludeQuery(qFile); + } + } + } + + /** + * Exclude a specific query + * @param qFile The specific query to exclude from the list of tests + */ + public void excludeQuery(String qFile) { + excludedQueryFileNames.add(qFile); + } + + public List getQueryFilesAsParameters() throws Exception { + List ret = new ArrayList<>(); + + // get the file list + Set testFiles = new LinkedHashSet<>(); + if (queryFile != null && !queryFile.equals("")) { + // The user may have passed a list of files - comma separated + for (String qFile : TEST_SPLITTER.split(queryFile)) { + testFiles.add(new File(queryDirectory, qFile)); + } + } else if (queryFileRegex != null && !queryFileRegex.equals("")) { + for (String regex : TEST_SPLITTER.split(queryFileRegex)) { + testFiles.addAll(Arrays.asList(queryDirectory.listFiles(new QFileRegexFilter(regex)))); + } + } else if (runDisabled) { + testFiles.addAll(Arrays.asList(queryDirectory.listFiles( + new IncludeFilter(includeAll, includedQueryFileNames, ".q.disabled")))); + } else { + testFiles.addAll(Arrays.asList(queryDirectory.listFiles( + new IncludeFilter(includeAll, includedQueryFileNames, ".q")))); + } + + for (String qFileName : excludedQueryFileNames) { + testFiles.remove(new File(queryDirectory, qFileName)); + } + + // Convert the file list to { name, file } pairs + for (File file : testFiles) { + String label = file.getName().replaceAll("\\.[^\\.]+$", ""); + ret.add(new Object[] { label, file }); + } + return ret; + } + + /** + * Get and trim a specific System property. Returns null if the property is empty or missing + * @param propName The property name to fetch + * @return The value of the property, or null if missing or empty + */ + private String getProperty(String propName) { + String propValue = System.getProperty(propName); + if (propValue == null || propValue.trim().length() == 0) { + return null; + } + return propValue.trim(); + } + + /** + * Calculate the value of a property with the following precedence + * - SystemProperty, if it is set + * - ConstructorValue, if the SystemProperty is not set + * - DefaultValue, if neither SystemProperty, nor ConstructorValue is set + * @param systemPropertyKey The key of the SystemProperty to look for + * @param constructorValue The ConstructorValue + * @param defaultValue The DefaultValue + * @return The value calculated using the precedence above + */ + private String getProperty(String systemPropertyKey, String constructorValue, + String defaultValue) { + String propValue = getProperty(systemPropertyKey); + if (propValue !=null) { + System.out.println("Property: " + systemPropertyKey + " used as override with val: " + + propValue); + return propValue; + } + if (constructorValue != null) { + return constructorValue; + } + System.out.println("Property: " + systemPropertyKey + " is not set, using default value: " + + defaultValue); + return defaultValue; + } + + /** + * Get the actual file names in the testFilesConf for the given fileset type.
+ * @param fileSet The type of the fileset + * @return Iterable of the query file names in the given fileset + */ + private Iterable getFileNames(TestFileSet fileSet) { + String fileSetConfigName = fileSet.name().replaceAll("_","."); + String fileNames = getProperty(fileSetConfigName); + if (fileNames == null) { + fileNames = testFilesConf.getProperty(fileSetConfigName); + } + if (fileNames != null) { + return TEST_SPLITTER.split(fileNames); + } + return null; + } + + /** + * Initialize the necessary test directories + * @throws IOException + */ + private void prepareDirs() throws IOException { + File hiveRootDir = new File(QTestUtil.getHiveRootDir()); + if (!hiveRootDir.exists()) { + throw new RuntimeException( + "Hive Root Directory " + hiveRootDir.getCanonicalPath() + " does not exist"); + } + + if (!logDirectory.exists()) { + FileUtils.forceMkdir(logDirectory); + } + + if (!resultsDirectory.exists()) { + FileUtils.forceMkdir(resultsDirectory); + } + } + + public static class IncludeFilter implements FileFilter { + boolean includeAll; + Set includeOnly; + String endsWith; + + public IncludeFilter(boolean includeAll, Set includeOnly, String endsWith) { + this.includeAll = includeAll; + this.includeOnly = includeOnly; + this.endsWith = endsWith; + } + + @Override + public boolean accept(File fpath) { + return !fpath.isDirectory() && (includeAll || includeOnly.contains(fpath.getName())) && + fpath.getName().endsWith(endsWith); + } + } + + public static class QFileRegexFilter implements FileFilter { + Pattern filterPattern; + + public QFileRegexFilter(String filter) { + filterPattern = Pattern.compile(filter); + } + + @Override + public boolean accept(File filePath) { + if (filePath.isDirectory() || !filePath.getName().endsWith(".q")) { + return false; + } + String testName = StringUtils.chomp(filePath.getName(), ".q"); + return filterPattern.matcher(testName).matches(); + } + } + + public enum MetastoreType { + sql, hbase + } + + public enum TestFileSet { + minimr_query_files, + disabled_query_files, + minitez_query_files_shared, + minitez_query_files, + minillap_shared_query_files, + minillap_query_files, + encrypted_query_files, + beeline_positive_exclude, + minimr_query_negative_files, + spark_query_files, + spark_only_query_files, + miniSparkOnYarn_query_files, + spark_query_negative_files + } +} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java deleted file mode 100644 index 319a205..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ /dev/null @@ -1,503 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hadoop.hive.cli.control; - -import java.io.File; -import java.net.MalformedURLException; -import java.net.URL; - -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.apache.hadoop.hive.ql.parse.CoreParseNegative; - -public class CliConfigs { - - private static URL testConfigProps = getTestPropsURL(); - - private static URL getTestPropsURL() { - try { - return new File( - AbstractCliConfig.HIVE_ROOT + "/itests/src/test/resources/testconfiguration.properties") - .toURI().toURL(); - } catch (MalformedURLException e) { - throw new RuntimeException(e); - } - } - - public static class CliConfig extends AbstractCliConfig { - - public CliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - excludesFrom(testConfigProps, "minillap.query.files"); - excludesFrom(testConfigProps, "minimr.query.files"); - excludesFrom(testConfigProps, "minitez.query.files"); - excludesFrom(testConfigProps, "encrypted.query.files"); - excludesFrom(testConfigProps, "spark.only.query.files"); - excludesFrom(testConfigProps, "disabled.query.files"); - - setResultsDir("ql/src/test/results/clientpositive"); - setLogDir("itests/qtest/target/qfile-results/clientpositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class ParseNegativeConfig extends AbstractCliConfig { - public ParseNegativeConfig() { - super(CoreParseNegative.class); - try { - setQueryDir("ql/src/test/queries/negative"); - - setResultsDir("ql/src/test/results/compiler/errors"); - setLogDir("itests/qtest/target/qfile-results/negative"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/perf-reg/"); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class MinimrCliConfig extends AbstractCliConfig { - public MinimrCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "minimr.query.files"); - - setResultsDir("ql/src/test/results/clientpositive"); - setLogDir("itests/qtest/target/qfile-results/clientpositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.mr); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class MiniTezCliConfig extends AbstractCliConfig { - public MiniTezCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "minitez.query.files"); - includesFrom(testConfigProps, "minitez.query.files.shared"); - excludesFrom(testConfigProps, "minillap.query.files"); - excludesFrom(testConfigProps, "minillap.shared.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/tez"); - setLogDir("itests/qtest/target/qfile-results/clientpositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/tez"); - setClusterType(MiniClusterType.tez); - setMetastoreType(MetastoreType.hbase); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static 
class MiniLlapCliConfig extends AbstractCliConfig { - public MiniLlapCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "minillap.query.files"); - includesFrom(testConfigProps, "minillap.shared.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/llap"); - setLogDir("itests/qtest/target/qfile-results/clientpositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/llap"); - setClusterType(MiniClusterType.llap); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class EncryptedHDFSCliConfig extends AbstractCliConfig { - public EncryptedHDFSCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "encrypted.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/encrypted"); - setLogDir("itests/qtest/target/qfile-results/clientpositive"); - - setInitScript("q_test_init_for_encryption.sql"); - setCleanupScript("q_test_cleanup_for_encryption.sql"); - - setHiveConfDir("data/conf"); - setClusterType(MiniClusterType.encrypted); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class ContribCliConfig extends AbstractCliConfig { - public ContribCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("contrib/src/test/queries/clientpositive"); - - setResultsDir("contrib/src/test/results/clientpositive"); - setLogDir("itests/qtest/target/qfile-results/contribclientpositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class PerfCliConfig extends AbstractCliConfig { - public PerfCliConfig() { - super(CorePerfCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive/perf"); - - excludesFrom(testConfigProps, "minimr.query.files"); - excludesFrom(testConfigProps, "minitez.query.files"); - excludesFrom(testConfigProps, "encrypted.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/perf/"); - setLogDir("itests/qtest/target/qfile-results/clientpositive/"); - - setInitScript("q_perf_test_init.sql"); - setCleanupScript("q_perf_test_cleanup.sql"); - - setHiveConfDir("data/conf/perf-reg/"); - setClusterType(MiniClusterType.tez); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class CompareCliConfig extends AbstractCliConfig { - public CompareCliConfig() { - super(CoreCompareCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientcompare"); - - setResultsDir("ql/src/test/results/clientcompare"); - setLogDir("itests/qtest/target/qfile-results/clientcompare"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class NegativeCliConfig extends AbstractCliConfig { - public NegativeCliConfig() { - super(CoreNegativeCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientnegative"); - - excludesFrom(testConfigProps, "minimr.query.negative.files"); - excludeQuery("authorization_uri_import.q"); - - 
setResultsDir("ql/src/test/results/clientnegative"); - setLogDir("itests/qtest/target/qfile-results/clientnegative"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class NegativeMinimrCli extends AbstractCliConfig { - public NegativeMinimrCli() { - super(CoreNegativeCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientnegative"); - - includesFrom(testConfigProps, "minimr.query.negative.files"); - - setResultsDir("ql/src/test/results/clientnegative"); - setLogDir("itests/qtest/target/qfile-results/clientnegative"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.mr); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class HBaseCliConfig extends AbstractCliConfig { - public HBaseCliConfig() { - super(CoreHBaseCliDriver.class); - try { - setQueryDir("hbase-handler/src/test/queries/positive"); - - setResultsDir("hbase-handler/src/test/results/positive"); - setLogDir("itests/qtest/target/qfile-results/hbase-handler/positive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class HBaseMinimrCliConfig extends AbstractCliConfig { - public HBaseMinimrCliConfig() { - super(CoreHBaseCliDriver.class); - try { - setQueryDir("hbase-handler/src/test/queries/positive"); - // XXX: i think this was non intentionally set to run only hbase_bulk.m??? 
- // includeQuery("hbase_bulk.m"); => will be filter out because not ends with .q - // to keep existing behaviour i added this method - overrideUserQueryFile("hbase_bulk.m"); - - setResultsDir("hbase-handler/src/test/results/positive"); - setLogDir("itests/qtest/target/qfile-results/hbase-handler/minimrpositive"); - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - setHiveConfDir(""); - setClusterType(MiniClusterType.mr); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class DummyConfig extends AbstractCliConfig { - public DummyConfig() { - super(CoreDummy.class); - try { - setQueryDir("ql/src/test/queries/clientcompare"); - - setResultsDir("ql/src/test/results/clientcompare"); - setLogDir("itests/qtest/target/qfile-results/clientcompare"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class HBaseNegativeCliConfig extends AbstractCliConfig { - public HBaseNegativeCliConfig() { - super(CoreHBaseNegativeCliDriver.class); - try { - setQueryDir("hbase-handler/src/test/queries/negative"); - - setResultsDir("hbase-handler/src/test/results/negative"); - setLogDir("itests/qtest/target/qfile-results/hbase-handler/negative"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class ContribNegativeCliConfig extends AbstractCliConfig { - public ContribNegativeCliConfig() { - super(CoreNegativeCliDriver.class); - try { - setQueryDir("contrib/src/test/queries/clientnegative"); - - setResultsDir("contrib/src/test/results/clientnegative"); - setLogDir("itests/qtest/target/qfile-results/contribclientnegative"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class BeeLineConfig extends AbstractCliConfig { - public BeeLineConfig() { - // FIXME: beeline is disabled... 
- super(null); - // super(CoreBeeLineDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - excludesFrom(testConfigProps, "beeline.positive.exclude"); - - setResultsDir("ql/src/test/results/clientpositive"); - setLogDir("itests/qtest/target/qfile-results/beelinepositive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class AccumuloCliConfig extends AbstractCliConfig { - public AccumuloCliConfig() { - super(CoreAccumuloCliDriver.class); - try { - setQueryDir("accumulo-handler/src/test/queries/positive"); - - excludesFrom(testConfigProps, "beeline.positive.exclude"); - - setResultsDir("accumulo-handler/src/test/results/positive"); - setLogDir("itests/qtest/target/qfile-results/accumulo-handler/positive"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir(""); - setClusterType(MiniClusterType.none); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class SparkCliConfig extends AbstractCliConfig { - public SparkCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "spark.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/spark"); - setLogDir("itests/qtest-spark/target/qfile-results/clientpositive/spark"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/spark/standalone"); - setClusterType(MiniClusterType.spark); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class SparkOnYarnCliConfig extends AbstractCliConfig { - public SparkOnYarnCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "miniSparkOnYarn.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/spark"); - setLogDir("itests/qtest-spark/target/qfile-results/clientpositive/spark"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/spark/yarn-client"); - setClusterType(MiniClusterType.miniSparkOnYarn); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class SparkNegativeCliConfig extends AbstractCliConfig { - public SparkNegativeCliConfig() { - super(CoreNegativeCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientnegative"); - - includesFrom(testConfigProps, "spark.query.negative.files"); - - setResultsDir("ql/src/test/results/clientnegative/spark"); - setLogDir("itests/qtest-spark/target/qfile-results/clientnegative/spark"); - - setInitScript("q_test_init.sql"); - setCleanupScript("q_test_cleanup.sql"); - - setHiveConfDir("data/conf/spark/standalone"); - setClusterType(MiniClusterType.spark); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java deleted file mode 100644 index a5d2711..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java 
+++ /dev/null @@ -1,111 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.assertTrue; -import org.apache.hadoop.hive.accumulo.AccumuloQTestUtil; -import org.apache.hadoop.hive.accumulo.AccumuloTestSetup; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -public class CoreAccumuloCliDriver extends CliAdapter { - - private AccumuloQTestUtil qt; - private static AccumuloTestSetup setup; - - public CoreAccumuloCliDriver(AbstractCliConfig cliConfig) { - super(cliConfig); - } - - @Override - @BeforeClass - public void beforeClass() { - setup = new AccumuloTestSetup(); - } - @Override - @AfterClass - public void shutdown() throws Exception { - // FIXME: possibly missing - // setup.tearDown(); - } - @Override - @Before - public void setUp() { - - MiniClusterType miniMR = cliConfig.getClusterType(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - - try { - qt = new AccumuloQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - setup, initScript, cleanupScript); - } catch (Exception e) { - throw new RuntimeException("Unexpected exception in setUp",e); - } - } - - @Override - @After - public void tearDown() { - try { - qt.shutdown(); - } - catch (Exception e) { - throw new RuntimeException("Unexpected exception in tearDown",e); - } - } - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - System.err.println("Test " + fname + " skipped"); - return; - } - - qt.cliInit(fname); - qt.clearTestSideEffects(); - int ecode = qt.executeClient(fname); - if (ecode != 0) { - qt.failed(ecode, fname, null); - } - - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - qt.failedDiff(ecode, fname, null); - } - qt.clearPostTestEffects(); - - } catch (Throwable e) { - qt.failed(e, fname, null); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - assertTrue("Test passed", true); - } -} - diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java deleted file mode 100644 index e5144e3..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ /dev/null @@ -1,171 +0,0 @@ -/** - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.cli.control; -//beeline is excluded by default -//AFAIK contains broken tests -//and produces compile errors...i'll comment out this whole class for now... -/* - -import static org.junit.Assert.fail; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hive.beeline.util.QFileClient; -import org.apache.hive.service.server.HiveServer2; -import org.junit.AfterClass; -import org.junit.BeforeClass; -// HIVE-14444: i've dropped this: @RunWith(ConcurrentTestRunner.class) -public class CoreBeeLineDriver extends CliAdapter { - private final String hiveRootDirectory = AbstractCliConfig.HIVE_ROOT; - private final String queryDirectory; - private final String logDirectory; - private final String resultsDirectory; - private boolean overwrite = false; - private static String scratchDirectory; - private static QTestUtil.QTestSetup miniZKCluster = null; - - private static HiveServer2 hiveServer2; - - public CoreBeeLineDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - queryDirectory = testCliConfig.getQueryDirectory(); - logDirectory = testCliConfig.getLogDir(); - resultsDirectory = testCliConfig.getResultsDir(); - } - - @Override - @BeforeClass - public void beforeClass() throws Exception { - HiveConf hiveConf = new HiveConf(); - hiveConf.logVars(System.err); - System.err.flush(); - - scratchDirectory = hiveConf.getVar(SCRATCHDIR); - - String testOutputOverwrite = System.getProperty("test.output.overwrite"); - if (testOutputOverwrite != null && "true".equalsIgnoreCase(testOutputOverwrite)) { - overwrite = true; - } - - miniZKCluster = new QTestUtil.QTestSetup(); - miniZKCluster.preTest(hiveConf); - - System.setProperty("hive.zookeeper.quorum", - hiveConf.get("hive.zookeeper.quorum")); - System.setProperty("hive.zookeeper.client.port", - hiveConf.get("hive.zookeeper.client.port")); - - String disableserver = System.getProperty("test.service.disable.server"); - if (null != disableserver && disableserver.equalsIgnoreCase("true")) { - System.err.println("test.service.disable.server=true " - + "Skipping HiveServer2 initialization!"); - return; - } - - hiveServer2 = new HiveServer2(); - hiveServer2.init(hiveConf); - System.err.println("Starting HiveServer2..."); - hiveServer2.start(); - Thread.sleep(5000); - } - - - @Override - @AfterClass - public void shutdown() { - try { - if (hiveServer2 != null) { - System.err.println("Stopping HiveServer2..."); - hiveServer2.stop(); - } - } catch (Throwable t) { - t.printStackTrace(); - } - - if (miniZKCluster != null) { - try { - miniZKCluster.tearDown(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - - public void 
runTest(String qFileName) throws Exception { - QFileClient qClient = new QFileClient(new HiveConf(), hiveRootDirectory, - queryDirectory, logDirectory, resultsDirectory) - .setQFileName(qFileName) - .setUsername("user") - .setPassword("password") - .setJdbcUrl("jdbc:hive2://localhost:10000") - .setJdbcDriver("org.apache.hive.jdbc.HiveDriver") - .setTestDataDirectory(hiveRootDirectory + "/data/files") - .setTestScriptDirectory(hiveRootDirectory + "/data/scripts"); - - long startTime = System.currentTimeMillis(); - System.err.println(">>> STARTED " + qFileName - + " (Thread " + Thread.currentThread().getName() + ")"); - try { - qClient.run(); - } catch (Exception e) { - System.err.println(">>> FAILED " + qFileName + " with exception:"); - e.printStackTrace(); - throw e; - } - long elapsedTime = (System.currentTimeMillis() - startTime)/1000; - String time = "(" + elapsedTime + "s)"; - - if (qClient.compareResults()) { - System.err.println(">>> PASSED " + qFileName + " " + time); - } else { - if (qClient.hasErrors()) { - System.err.println(">>> FAILED " + qFileName + " (ERROR) " + time); - fail(); - } - if (overwrite) { - System.err.println(">>> PASSED " + qFileName + " (OVERWRITE) " + time); - qClient.overwriteResults(); - } else { - System.err.println(">>> FAILED " + qFileName + " (DIFF) " + time); - fail(); - } - } - } - - @Override - public void setUp() { - // TODO Auto-generated method stub - - } - - @Override - public void tearDown() { - // TODO Auto-generated method stub - - } - - @Override - public void runTest(String name, String name2, String absolutePath) throws Exception { - runTest(name2); - } - -} - - -*/ \ No newline at end of file diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java deleted file mode 100644 index db58f1d..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java +++ /dev/null @@ -1,194 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.concurrent.TimeUnit; - -import com.google.common.base.Stopwatch; -import org.apache.hadoop.hive.cli.control.AbstractCliConfig.MetastoreType; -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CoreCliDriver extends CliAdapter { - - private static final Logger LOG = LoggerFactory.getLogger(CoreCliDriver.class); - private static QTestUtil qt; - - public CoreCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - @BeforeClass - public void beforeClass() { - String message = "Starting " + CoreCliDriver.class.getName() + " run at " + System.currentTimeMillis(); - LOG.info(message); - System.err.println(message); - final MiniClusterType miniMR =cliConfig.getClusterType(); - final String hiveConfDir = cliConfig.getHiveConfDir(); - final String initScript = cliConfig.getInitScript(); - final String cleanupScript = cliConfig.getCleanupScript(); - final boolean useHBaseMetastore = cliConfig.getMetastoreType() == MetastoreType.hbase; - try { - final String hadoopVer = cliConfig.getHadoopVersion(); - - qt = new ElapsedTimeLoggingWrapper() { - @Override - public QTestUtil invokeInternal() throws Exception { - return new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true); - } - }.invoke("QtestUtil instance created", LOG, true); - - // do a one time initialization - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.cleanUp(); - return null; - } - }.invoke("Initialization cleanup done.", LOG, true); - - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.createSources(); - return null; - } - }.invoke("Initialization createSources done.", LOG, true); - - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - throw new RuntimeException("Unexpected exception in static initialization",e); - } - } - - @Override - @Before - public void setUp() { - try { - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.clearTestSideEffects(); - return null; - } - }.invoke("PerTestSetup done.", LOG, false); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in setup"); - } - } - - @Override - @After - public void tearDown() { - try { - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.clearPostTestEffects(); - return null; - } - }.invoke("PerTestTearDown done.", LOG, false); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - try { - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.shutdown(); - return null; - } - }.invoke("Teardown 
done.", LOG, false); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in shutdown"); - } - } - - static String debugHint = "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " - + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - Stopwatch sw = new Stopwatch().start(); - boolean skipped = false; - boolean failed = false; - try { - LOG.info("Begin query: " + fname); - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - LOG.info("Test " + fname + " skipped"); - System.err.println("Test " + fname + " skipped"); - skipped = true; - return; - } - - qt.cliInit(fname, false); - int ecode = qt.executeClient(fname); - if (ecode != 0) { - failed = true; - qt.failed(ecode, fname, debugHint); - } - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - failed = true; - qt.failedDiff(ecode, fname, debugHint); - } - } - catch (Throwable e) { - failed = true; - qt.failed(e, fname, debugHint); - } finally { - String message = "Done query" + fname + ". succeeded=" + !failed + ", skipped=" + skipped + - ". ElapsedTime(ms)=" + sw.stop().elapsed(TimeUnit.MILLISECONDS); - LOG.info(message); - System.err.println(message); - } - assertTrue("Test passed", true); - } -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java deleted file mode 100644 index 71a02bc..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java +++ /dev/null @@ -1,158 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.fail; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -public class CoreCompareCliDriver extends CliAdapter{ - - private static QTestUtil qt; - public CoreCompareCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - - @Override - @BeforeClass - public void beforeClass() { - - MiniClusterType miniMR = cliConfig.getClusterType(); - String hiveConfDir = cliConfig.getHiveConfDir(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - try { - String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, false, false); - - // do a one time initialization - qt.cleanUp(); - qt.createSources(); - - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in static initialization"); - } - } - - @Override - @Before - public void setUp() { - try { - qt.clearTestSideEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in setup"); - } - } - - @Override - @After - public void tearDown() { - try { - qt.clearPostTestEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - try { - qt.shutdown(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in shutdown"); - } - } - - private Map> versionFiles = new HashMap<>(); - - static String debugHint = "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " - + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - final String queryDirectory = cliConfig.getQueryDirectory(); - - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - // TODO: versions could also be picked at build time. - List versionFiles = QTestUtil.getVersionFiles(queryDirectory, tname); - if (versionFiles.size() < 2) { - fail("Cannot run " + tname + " with only " + versionFiles.size() + " versions"); - } - - qt.addFile(fpath); - for (String versionFile : versionFiles) { - qt.addFile(new File(queryDirectory, versionFile), true); - } - - if (qt.shouldBeSkipped(fname)) { - return; - } - - int ecode = 0; - List outputs = new ArrayList<>(versionFiles.size()); - for (String versionFile : versionFiles) { - // 1 for "_" after tname; 3 for ".qv" at the end. Version is in between. - String versionStr = versionFile.substring(tname.length() + 1, versionFile.length() - 3); - outputs.add(qt.cliInit(tname + "." + versionStr, false)); - // TODO: will this work? 
- ecode = qt.executeClient(versionFile, fname); - if (ecode != 0) { - qt.failed(ecode, fname, debugHint); - } - } - - ecode = qt.checkCompareCliDriverResults(fname, outputs); - if (ecode != 0) { - qt.failedDiff(ecode, fname, debugHint); - } - } - catch (Throwable e) { - qt.failed(e, fname, debugHint); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - } -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java deleted file mode 100644 index b7afb48..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.fail; - -import java.util.List; - -import org.apache.hadoop.hive.ql.QTestUtil; - -public class CoreDummy extends CliAdapter { - - public CoreDummy(AbstractCliConfig cliConfig) { - super(cliConfig); - } - - @Override - public void beforeClass() { - } - - @Override - public void setUp() { - } - - @Override - public void tearDown() { - } - - @Override - public void shutdown() throws Exception { - } - - @Override - public void runTest(String name, String name2, String absolutePath) throws Exception { - List versionFiles = QTestUtil.getVersionFiles(cliConfig.getQueryDirectory(), name); - if (versionFiles.size() < 2) { - fail("Cannot run " + name2 + " with only " + versionFiles.size() + " versions"); - } - fail("x"); - } - -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java deleted file mode 100644 index 956a42d..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.cli.control; - -import static org.apache.hadoop.hive.cli.control.AbstractCliConfig.HIVE_ROOT; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import org.apache.hadoop.hive.hbase.HBaseQTestUtil; -import org.apache.hadoop.hive.hbase.HBaseTestSetup; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -public class CoreHBaseCliDriver extends CliAdapter { - - private HBaseQTestUtil qt; - private HBaseTestSetup setup = new HBaseTestSetup(); - - public CoreHBaseCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - @BeforeClass - public void beforeClass() { - MiniClusterType miniMR = cliConfig.getClusterType(); - String initScript = cliConfig.getInitScript(); - String cleanupScript =cliConfig.getCleanupScript(); - - try { - qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - setup, initScript, cleanupScript); - qt.cleanUp(null); - qt.createSources(null); - - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in static initialization: "+e.getMessage()); - } - - } - - @Override - @Before - public void setUp() { - try { - qt.clearTestSideEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in setup"); - } - } - @Override - @After - public void tearDown() { - try { - qt.clearPostTestEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - try { - // FIXME: there were 2 afterclass methods...i guess this is the right order...maybe not - qt.shutdown(); - setup.tearDown(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in shutdown"); - } - } - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - System.err.println("Test " + fname + " skipped"); - return; - } - - qt.cliInit(fname, false); - - int ecode = qt.executeClient(fname); - if (ecode != 0) { - qt.failed(ecode, fname, null); - } - - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - qt.failedDiff(ecode, fname, null); - } - - } catch (Throwable e) { - qt.failed(e, fname, null); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - assertTrue("Test passed", true); - } -} - diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java deleted file mode 100644 index 6225180..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import org.apache.hadoop.hive.hbase.HBaseQTestUtil; -import org.apache.hadoop.hive.hbase.HBaseTestSetup; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; - -public class CoreHBaseNegativeCliDriver extends CliAdapter { - - private HBaseQTestUtil qt; - private static HBaseTestSetup setup = new HBaseTestSetup(); - - public CoreHBaseNegativeCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - public void beforeClass() throws Exception { - } - - // hmm..this looks a bit wierd...setup boots qtestutil...this part used to be in beforeclass - @Override - @Before - public void setUp() { - - MiniClusterType miniMR = cliConfig.getClusterType(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - - try { - qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - setup, initScript, cleanupScript); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in setup"); - } - } - - @Override - @After - public void tearDown() { - try { - qt.shutdown(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - // closeHBaseConnections - setup.tearDown(); - } - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - System.err.println("Test " + fname + " skipped"); - return; - } - - qt.cliInit(fname); - qt.clearTestSideEffects(); - int ecode = qt.executeClient(fname); - if (ecode == 0) { - qt.failed(fname, null); - } - - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - qt.failedDiff(ecode, fname, null); - } - qt.clearPostTestEffects(); - - } catch (Throwable e) { - qt.failed(e, fname, null); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - assertTrue("Test passed", true); - } - - -} - diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java deleted file mode 
100644 index 65b2ce7..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.cli.control; - -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; - -public class CoreNegativeCliDriver extends CliAdapter{ - - private QTestUtil qt; - public CoreNegativeCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - public void beforeClass(){ - MiniClusterType miniMR = cliConfig.getClusterType(); - String hiveConfDir = cliConfig.getHiveConfDir(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - - try { - String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, false, false); - // do a one time initialization - qt.cleanUp(); - qt.createSources(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in static initialization"); - } - } - - @Override - @Before - public void setUp() { - try { - qt.clearTestSideEffects(); - } catch (Throwable e) { - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in setup"); - } - } - - @Override - @After - public void tearDown() { - try { - qt.clearPostTestEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - try { - qt.shutdown(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in shutdown"); - } - } - - /** - * Dummy last test. 
This is only meant to shutdown qt - */ - public void testNegativeCliDriver_shutdown() { - System.err.println ("Cleaning up " + "$className"); - } - - static String debugHint = "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " - + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; - - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - System.err.println("Test " + fname + " skipped"); - return; - } - - qt.cliInit(fname, false); - int ecode = qt.executeClient(fname); - if (ecode == 0) { - qt.failed(fname, debugHint); - } - - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - qt.failedDiff(ecode, fname, debugHint); - } - } - catch (Throwable e) { - qt.failed(e, fname, debugHint); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - assertTrue("Test passed", true); - } -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java deleted file mode 100644 index 8620cde..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java +++ /dev/null @@ -1,138 +0,0 @@ -package org.apache.hadoop.hive.cli.control; -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - - -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.junit.After; -import org.junit.AfterClass; -/** - This is the TestPerformance Cli Driver for integrating performance regression tests - as part of the Hive Unit tests. - Currently this includes support for : - 1. Running explain plans for TPCDS workload (non-partitioned dataset) on 30TB scaleset. - TODO : - 1. Support for partitioned data set - 2. Use HBase Metastore instead of Derby - -This suite differs from TestCliDriver w.r.t the fact that we modify the underlying metastore -database to reflect the dataset before running the queries. 
-*/ -public class CorePerfCliDriver extends CliAdapter{ - - private static QTestUtil qt; - - public CorePerfCliDriver(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - public void beforeClass() { - System.setProperty("datanucleus.schema.autoCreateAll", "true"); - System.setProperty("hive.metastore.schema.verification", "false"); - MiniClusterType miniMR = cliConfig.getClusterType(); - String hiveConfDir = cliConfig.getHiveConfDir(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - try { - String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, hiveConfDir, - hadoopVer, initScript, - cleanupScript, false, false); - - // do a one time initialization - qt.cleanUp(); - qt.createSources(); - // Manually modify the underlying metastore db to reflect statistics corresponding to - // the 30TB TPCDS scale set. This way the optimizer will generate plans for a 30 TB set. - QTestUtil.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf()); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - throw new RuntimeException("Unexpected exception in static initialization: " + e.getMessage(), - e); - } - } - - @Override - @AfterClass - public void shutdown() throws Exception { - qt.shutdown(); - } - - @Override - public void setUp() { - } - - @Override - @After - public void tearDown() { - try { - qt.clearPostTestEffects(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - fail("Unexpected exception in tearDown"); - } - } - - static String debugHint = - "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " - + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; - - - @Override - public void runTest(String name, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - - if (qt.shouldBeSkipped(fname)) { - return; - } - - qt.cliInit(fname, false); - - int ecode = qt.executeClient(fname); - if (ecode != 0) { - qt.failed(ecode, fname, debugHint); - } - ecode = qt.checkCliDriverResults(fname); - if (ecode != 0) { - qt.failedDiff(ecode, fname, debugHint); - } - } catch (Throwable e) { - qt.failed(e, fname, debugHint); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s"); - assertTrue("Test passed", true); - } - - -} diff --git itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java index 01faaba..cbe9645 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hive.cli.control.CliConfig; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.QTestUtil; @@ -41,6 +42,11 @@ private HBaseTestSetup hbaseSetup = null; + public HBaseQTestUtil(CliConfig cliConfig, 
HBaseTestSetup setup) throws Exception { + this(cliConfig.getResultsDir(), cliConfig.getLogDir(), cliConfig.getClusterType(), setup, + cliConfig.getInitScript(), cliConfig.getCleanupScript()); + } + public HBaseQTestUtil( String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup, String initScript, String cleanupScript) diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 4d4a929..82c200b 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -32,6 +32,7 @@ import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -51,7 +52,6 @@ import java.util.Comparator; import java.util.Deque; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -63,6 +63,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import com.google.common.base.Stopwatch; +import com.google.common.collect.Sets; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; @@ -77,7 +79,7 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hive.cli.CliDriver; import org.apache.hadoop.hive.cli.CliSessionState; -import org.apache.hadoop.hive.cli.control.AbstractCliConfig; +import org.apache.hadoop.hive.cli.control.CliConfig; import org.apache.hadoop.hive.common.io.CachingPrintStream; import org.apache.hadoop.hive.common.io.DigestPrintStream; import org.apache.hadoop.hive.common.io.SortAndDigestPrintStream; @@ -85,9 +87,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.LlapItUtils; -import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration; import org.apache.hadoop.hive.llap.daemon.MiniLlapCluster; -import org.apache.hadoop.hive.llap.daemon.impl.LlapDaemon; import org.apache.hadoop.hive.llap.io.api.LlapProxy; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.Index; @@ -146,6 +146,7 @@ private final static String defaultInitScript = "q_test_init.sql"; private final static String defaultCleanupScript = "q_test_cleanup.sql"; private final String[] testOnlyCommands = new String[]{"crypto"}; + private static String hiveRootDir = null; private String testWarehouse; private final String testFiles; @@ -185,6 +186,11 @@ private final String cleanupScript; private boolean useHBaseMetastore = false; + protected static final String DEBUG_HINT = "\nSee ./ql/target/tmp/log/hive.log or " + + "./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or " + + "./itests/qtest/target/surefire-reports/ for specific test cases logs."; + + public interface SuiteAddTestFunctor { public void addTestToSuite(TestSuite suite, Object setup, String tName); } @@ -361,8 +367,7 @@ public static MiniClusterType valueForString(String type) { private String getKeyProviderURI() { // Use the target directory if it is not specified - String HIVE_ROOT = AbstractCliConfig.HIVE_ROOT; - String keyDir = HIVE_ROOT + "ql/target/"; + String keyDir = getHiveRootDir() + "ql/target/"; // put the jks file in the current test path only for test purpose 
return "jceks://file" + new Path(keyDir, "test.jks").toUri(); @@ -384,6 +389,14 @@ private void startMiniHBaseCluster() throws Exception { initHBaseMetastoreMethod.invoke(null, admin, conf); } + public QTestUtil(CliConfig cliConfig) throws Exception { + this(cliConfig.getResultsDir(), cliConfig.getLogDir(), cliConfig.getClusterType(), + cliConfig.getHiveConfDir() != null && !cliConfig.getHiveConfDir().isEmpty() ? + getHiveRootDir() + cliConfig.getHiveConfDir() : "", cliConfig.getHadoopVersion(), + cliConfig.getInitScript(), cliConfig.getCleanupScript(), + cliConfig.getMetastoreType() == CliConfig.MetastoreType.hbase, cliConfig.isWithLlapIo()); + } + public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String confDir, String hadoopVer, String initScript, String cleanupScript, boolean useHBaseMetastore, boolean withLlapIo) @@ -395,12 +408,12 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, this.outDir = outDir; this.logDir = logDir; this.useHBaseMetastore = useHBaseMetastore; - this.srcTables=getSrcTables(); + this.srcTables = getSrcTables(); // HIVE-14443 move this fall-back logic to CliConfigs if (confDir != null && !confDir.isEmpty()) { - HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); - System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation()); + HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml")); + System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } queryState = new QueryState(new HiveConf(Driver.class)); @@ -802,10 +815,10 @@ public void clearTablesCreatedDuringTests() throws Exception { SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME); List<String> roleNames = db.getAllRoleNames(); - for (String roleName : roleNames) { - if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) { - db.dropRole(roleName); - } + for (String roleName : roleNames) { + if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) { + db.dropRole(roleName); + } } } @@ -1026,7 +1039,7 @@ public String cliInit(String tname, boolean recreate) throws Exception { cliDriver = new CliDriver(); if (tname.equals("init_file.q")) { - ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql"); + ss.initFiles.add(getHiveRootDir() + "/data/scripts/test_init_file.sql"); } cliDriver.processInitFiles(ss); @@ -2058,10 +2071,11 @@ public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf c LOG.debug("Connected to metastore database "); } - String mdbPath = AbstractCliConfig.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/"; + String mdbPath = getHiveRootDir() + "/data/files/tpcds-perf/metastore_export/"; // Setup the table column stats - BufferedReader br = new BufferedReader(new FileReader(new File(AbstractCliConfig.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql"))); + BufferedReader br = new BufferedReader(new FileReader(new File(getHiveRootDir() + + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql"))); String command; s.execute("DROP TABLE APP.TABLE_PARAMS"); @@ -2197,4 +2211,253 @@ public int compare(String str1, String str2) { } } + /** + * Calculate the hive source root directory. Looks for a directory with itests, ql, metastore + * subdirectories.
Starts from hive.root, and if not found, from the current path. + * @return The hive source root directory + * @exception RuntimeException if no directory found. + */ + public static String getHiveRootDir() { + if (hiveRootDir != null) { + return hiveRootDir; + } + List<String> candidateSiblings = new ArrayList<>(); + if (System.getProperty("hive.root") != null) { + try { + candidateSiblings.add(new File(System.getProperty("hive.root")).getCanonicalPath()); + } catch (IOException e) { + throw new RuntimeException("error getting hive.root",e); + } + } + candidateSiblings.add(new File(".").getAbsolutePath()); + + for (String string : candidateSiblings) { + File curr = new File(string); + do { + Set<String> lls = Sets.newHashSet(curr.list()); + if (lls.contains("itests") && lls.contains("ql") && lls.contains("metastore")) { + System.out.println("detected hiveRoot: " + curr); + hiveRootDir = QTestUtil.ensurePathEndsInSlash(curr.getAbsolutePath()); + return hiveRootDir; + } + curr = curr.getParentFile(); + } while (curr != null); + } + throw new RuntimeException("unable to find hiveRoot"); + } + + /** + * Tries to find out the hadoop version from the META-INF property file + * (/META-INF/maven/org.apache.hadoop/hadoop-hdfs/pom.properties) + * @return The hadoop version information from the property file, null if there is an error + */ + public static String getHadoopVersionFromJar() { + String hadoopPropsLoc = "/META-INF/maven/org.apache.hadoop/hadoop-hdfs/pom.properties"; + URL hadoopPropsURL = QTestUtil.class.getResource(hadoopPropsLoc); + if (hadoopPropsURL == null) { + System.err.println("Failed to get hadoop properties: " + hadoopPropsLoc); + return null; + } + try (InputStream is = hadoopPropsURL.openStream()) { + Properties props = new Properties(); + props.load(is); + String foundHadoopVersion = props.getProperty("version"); + if (foundHadoopVersion == null) { + System.err.println("Failed to find version property for hadoop"); + } + return foundHadoopVersion; + } catch (IOException e) { + System.err.println("Unable to extract hadoop.version from: " + hadoopPropsURL); + } + return null; + } + + /** + * Run a test query, where the expected result is success + * @param qfile The query file to run + * @throws Exception If there is an exception during the run + */ + public void runQuery(File qfile) throws Exception { + runTest(qfile, false); + } + + /** + * Run a test query, where the expected result is failure + * @param qfile The query file to run + * @throws Exception If there is an exception during the run + */ + public void runFailingQuery(File qfile) throws Exception { + runTest(qfile, true); + } + + /** + * Run the test with an expected result + * @param qfile The query file to run + * @param expectedToFail True if the test is expected to fail, false otherwise + * @throws Exception If there is an exception during the run + */ + protected void runTest(File qfile, boolean expectedToFail) throws Exception { + Stopwatch sw = new Stopwatch().start(); + TestStatus status = TestStatus.started; + String fname = qfile.getName(); + try { + LOG.info("Begin query: " + fname); + System.err.println("Begin query: " + fname); + + addFile(qfile); + + if (shouldBeSkipped(fname)) { + System.err.println("Test " + fname + " skipped"); + status = TestStatus.skipped; + return; + } + + cliInit(fname, false); + int ecode = executeClient(fname); + if ( (ecode != 0 && !expectedToFail) || (ecode == 0 && expectedToFail) ) { + status = TestStatus.failed; + failed(ecode, fname, DEBUG_HINT); + } else { + ecode =
checkCliDriverResults(fname); + if (ecode != 0) { + status = TestStatus.diffError; + failedDiff(ecode, fname, DEBUG_HINT); + } else { + status = TestStatus.succeeded; + } + } + } catch (Exception e) { + status = TestStatus.exception; + failed(e, fname, DEBUG_HINT); + } finally { + String message = "Done query: " + fname + ". Result=" + status + ". ElapsedTime(ms)=" + + sw.stop().elapsed(TimeUnit.MILLISECONDS); + LOG.info(message); + System.err.println(message); + } + } + + /** + * Run a multiversioned test file + * @param qfile The base testfile to run + * @throws Exception If there is an exception during the run + */ + public void runVersionedTest(File qfile) throws Exception { + Stopwatch sw = new Stopwatch().start(); + TestStatus status = TestStatus.started; + String fname = qfile.getName(); + String tname = fname.replaceAll("\\.[^\\.]+$", ""); + String queryDirectory = qfile.getParent(); + + try { + LOG.info("Begin query: " + fname); + System.err.println("Begin query: " + fname); + + // TODO: versions could also be picked at build time. + List versionFiles = QTestUtil.getVersionFiles(queryDirectory, tname); + if (versionFiles.size() < 2) { + failed(fname, "Cannot run " + tname + " with only " + versionFiles.size() + " " + + "versions"); + status = TestStatus.failed; + } + + addFile(qfile); + for (String versionFile : versionFiles) { + addFile(new File(queryDirectory, versionFile), true); + } + + if (shouldBeSkipped(fname)) { + status = TestStatus.skipped; + return; + } + + int ecode = 0; + List outputs = new ArrayList(versionFiles.size()); + for (String versionFile : versionFiles) { + // 1 for "_" after tname; 3 for ".qv" at the end. Version is in between. + String versionStr = versionFile.substring(tname.length() + 1, versionFile.length() - 3); + outputs.add(cliInit(tname + "." + versionStr, false)); + // TODO: will this work? + ecode = executeClient(versionFile, fname); + if (ecode != 0) { + status = TestStatus.failed; + failed(ecode, fname, DEBUG_HINT); + } + } + + ecode = checkCompareCliDriverResults(fname, outputs); + if (ecode != 0) { + status = TestStatus.diffError; + failedDiff(ecode, fname, DEBUG_HINT); + } else { + status = TestStatus.succeeded; + } + } catch (Exception e) { + status = TestStatus.exception; + failed(e, fname, DEBUG_HINT); + } finally { + String message = "Done query: " + fname + ". Result=" + status + ". ElapsedTime(ms)=" + + sw.stop().elapsed(TimeUnit.MILLISECONDS); + LOG.info(message); + System.err.println(message); + } + } + + /** + * Parse error tests. 
The expected result is failure, and no change should happen on the test + * database + * @param qfile The query file to run + * @exception Exception during checking the result + */ + public void runParseTest(File qfile) throws Exception { + Stopwatch sw = new Stopwatch().start(); + TestStatus status = TestStatus.started; + String fname = qfile.getName(); + try { + try { + LOG.info("Begin query: " + fname); + System.err.println("Begin query: " + fname); + + addFile(qfile); + + ASTNode tree = parseQuery(fname); + analyzeAST(tree); + status = TestStatus.failed; + failed(fname, "Unexpected success for query: " + fname + DEBUG_HINT); + } catch (ParseException pe) { + int ecode = checkNegativeResults(fname, pe); + if (ecode != 0) { + status = TestStatus.diffError; + failed(ecode, fname, DEBUG_HINT); + } else { + status = TestStatus.succeeded; + } + } catch (SemanticException se) { + int ecode = checkNegativeResults(fname, se); + if (ecode != 0) { + status = TestStatus.diffError; + failedDiff(ecode, fname, DEBUG_HINT); + } else { + status = TestStatus.succeeded; + } + } catch (Exception e) { + status = TestStatus.exception; + failed(e, fname, DEBUG_HINT); + } + } finally { + String message = "Done query: " + fname + ". Result=" + status + ". ElapsedTime(ms)=" + + sw.stop().elapsed(TimeUnit.MILLISECONDS); + LOG.info(message); + System.err.println(message); + } + } + + private enum TestStatus { + started, + skipped, + failed, + diffError, + exception, + succeeded + } } diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java deleted file mode 100644 index 8dba0bb..0000000 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java +++ /dev/null @@ -1,129 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.ql.parse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.Serializable; -import java.util.List; -import org.apache.hadoop.hive.cli.control.AbstractCliConfig; -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; -import org.apache.hadoop.hive.ql.QTestUtil; -import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; -import org.apache.hadoop.hive.ql.exec.Task; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -public class CoreParseNegative extends CliAdapter{ - - private static QTestUtil qt; - - static CliConfigs.ParseNegativeConfig cliConfig = new CliConfigs.ParseNegativeConfig(); - static boolean firstRun; - public CoreParseNegative(AbstractCliConfig testCliConfig) { - super(testCliConfig); - } - - @Override - @BeforeClass - public void beforeClass() { - MiniClusterType miniMR = cliConfig.getClusterType(); - String initScript = cliConfig.getInitScript(); - String cleanupScript = cliConfig.getCleanupScript(); - firstRun = true; - try { - String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, null, - hadoopVer, - initScript, cleanupScript, false, false); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - throw new RuntimeException("Unexpected exception in static initialization",e); - } - } - - @Override - public void setUp() { - } - - @Override - @After - public void tearDown() { - } - - @Override - @AfterClass - public void shutdown() throws Exception { - String reason = "clear post test effects"; - try { - qt.clearPostTestEffects(); - reason = "shutdown"; - qt.shutdown(); - } catch (Exception e) { - System.err.println("Exception: " + e.getMessage()); - e.printStackTrace(); - System.err.flush(); - throw new RuntimeException("Unexpected exception in " + reason,e); - } - } - - static String debugHint = "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " - + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; - - - @Override - public void runTest(String tname, String fname, String fpath) throws Exception { - long startTime = System.currentTimeMillis(); - try { - System.err.println("Begin query: " + fname); - - qt.addFile(fpath); - if (firstRun) { - qt.init(fname); - firstRun = false; - } - ASTNode tree = qt.parseQuery(fname); - List> tasks = qt.analyzeAST(tree); - fail("Unexpected success for query: " + fname + debugHint); - } - catch (ParseException pe) { - int ecode = qt.checkNegativeResults(fname, pe); - if (ecode != 0) { - qt.failed(ecode, fname, debugHint); - } - } - catch (SemanticException se) { - int ecode = qt.checkNegativeResults(fname, se); - if (ecode != 0) { - qt.failedDiff(ecode, fname, debugHint); - } - } - catch (Throwable e) { - qt.failed(e, fname, debugHint); - } - - long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); - assertTrue("Test passed", true); - } - -} diff --git pom.xml pom.xml index 9ed1c19..519f038 100644 --- pom.xml +++ pom.xml @@ -1049,6 +1049,7 @@ ${clustermode} ${qfile_regex} ${run_disabled} + ${hadoop.version}
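Note on the new QTestUtil entry points: runQuery, runFailingQuery and the protected runTest added above absorb the per-test flow that the deleted Core*CliDriver adapters each duplicated (addFile, cliInit, executeClient, result diffing, timing). The sketch below only illustrates how those calls fit together when driven by hand; the class name, directory paths and query file name are invented placeholders, only the QTestUtil constructor, cleanUp/createSources, the run* methods and shutdown come from this patch, and a real driver would take its settings from a CliConfig rather than hard-coded strings.

import java.io.File;

import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;

public class QTestUtilSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical smoke run; every path literal below is illustrative, not taken from the patch.
    QTestUtil qt = new QTestUtil(
        "ql/src/test/results/clientpositive",               // outDir: directory with expected .q.out files
        "itests/qtest/target/qfile-results/clientpositive", // logDir: actual outputs and logs
        MiniClusterType.none,                               // no mini cluster
        "",                                                 // confDir: empty means the default hive-site.xml
        QTestUtil.getHadoopVersionFromJar(),                // hadoop version read from the hadoop-hdfs jar
        "q_test_init.sql",                                  // init script, as in the old CliConfigs
        "q_test_cleanup.sql",                               // cleanup script
        false,                                              // useHBaseMetastore
        false);                                             // withLlapIo
    try {
      // One-time initialization, mirroring what the deleted drivers did in beforeClass().
      qt.cleanUp();
      qt.createSources();

      File queryDir = new File(QTestUtil.getHiveRootDir(), "ql/src/test/queries/clientpositive");
      // Positive query: expected to succeed and to match its stored result file.
      qt.runQuery(new File(queryDir, "some_query.q"));      // hypothetical query file
      // A negative query would go through runFailingQuery(...) instead,
      // and parse-negative files through runParseTest(...).
    } finally {
      qt.shutdown();
    }
  }
}

The parameterized JUnit drivers in itests obtain the same settings through CliConfig and run one query file per test method, so the sketch is only meant to show which responsibilities moved out of the deleted adapters and into QTestUtil itself.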