diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
index b89d6e7..3402c6d 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
@@ -51,10 +51,10 @@ public CliAdapter(AbstractCliConfig cliConfig) {
   abstract public void beforeClass() throws Exception;

   // HIVE-14444 pending rename: before
-  abstract public void setUp();
+  abstract public void setUp() throws Exception;

   // HIVE-14444 pending rename: after
-  abstract public void tearDown();
+  abstract public void tearDown() throws Exception;

   // HIVE-14444 pending rename: afterClass
   abstract public void shutdown() throws Exception;
diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index 9dfc253..e1614f3 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter;
 import org.apache.hive.beeline.ConvertedOutputFile.Converter;
 import org.apache.hive.beeline.QFile;
@@ -38,6 +39,8 @@
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;

 public class CoreBeeLineDriver extends CliAdapter {
   private final File hiveRootDirectory = new File(AbstractCliConfig.HIVE_ROOT);
@@ -49,7 +52,7 @@
   private final File testDataDirectory;
   private final File testScriptDirectory;
   private boolean overwrite = false;
-  private boolean rewriteSourceTables = true;
+  private boolean parallel = false;
   private MiniHS2 miniHS2;
   private QFileClientBuilder clientBuilder;
   private QFileBuilder fileBuilder;
@@ -95,9 +98,9 @@ public void beforeClass() throws Exception {
     if (testOutputOverwrite != null && "true".equalsIgnoreCase(testOutputOverwrite)) {
       overwrite = true;
     }
-    String testRewriteSourceTables = System.getProperty("test.rewrite.source.tables");
-    if (testRewriteSourceTables != null && "false".equalsIgnoreCase(testRewriteSourceTables)) {
-      rewriteSourceTables = false;
+    String parallelString = System.getProperty("test.beeline.run.parallel");
+    if (parallelString != null && "true".equalsIgnoreCase(parallelString)) {
+      parallel = true;
     }

     String beeLineUrl = System.getProperty("test.beeline.url");
@@ -116,7 +119,7 @@ public void beforeClass() throws Exception {
         .setLogDirectory(logDirectory)
         .setQueryDirectory(queryDirectory)
         .setResultsDirectory(resultsDirectory)
-        .setRewriteSourceTables(rewriteSourceTables);
+        .setTestSpecificDatabase(parallel);
     runInfraScript(initScript, new File(logDirectory, "init.beeline"),
         new File(logDirectory, "init.raw"));
   }
@@ -125,19 +128,55 @@
   protected void runInfraScript(File script, File beeLineOutput, File log)
       throws IOException, SQLException {
     try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(beeLineOutput)) {
+      runCommands(beeLineClient, new String[] {"!run " + script}, log);
+    } catch (Exception e) {
+      throw new SQLException("Error running infra script: " + script
+          + "\nCheck the following logs for details:\n - " + beeLineOutput + "\n - " + log, e);
+    }
+  }
+
+  protected void runCleanup(File beeLineOutput, File log) throws IOException, SQLException {
+    Set<String> dropCommands = new HashSet<String>();
+
+    try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(beeLineOutput)) {
+      for (String database : beeLineClient.getDatabases()) {
+        if (!database.equals("default")) {
+          dropCommands.add("DROP DATABASE `" + database + "` CASCADE;");
+        }
+      }
+
+      Set<String> srcTables = QTestUtil.getSrcTables();
+      for (String table : beeLineClient.getTables()) {
+        if (!srcTables.contains(table)) {
+          dropCommands.add("DROP TABLE `" + table + "` PURGE;");
+        }
+      }
+
+      for (String view : beeLineClient.getViews()) {
+        dropCommands.add("DROP VIEW `" + view + "`;");
+      }
+      if (dropCommands.size() > 0) {
+        runCommands(beeLineClient, dropCommands.toArray(new String[]{}), log);
+      }
+    } catch (Exception e) {
+      throw new SQLException("Error running cleanup script."
+          + "\nCheck the following logs for details:\n - " + beeLineOutput + "\n - " + log, e);
+    }
+  }
+
+
+  protected void runCommands(QFileBeeLineClient beeLineClient, String[] commands, File log)
+      throws Exception
+  {
       beeLineClient.execute(
           new String[]{
-            "set hive.exec.pre.hooks=" + PreExecutePrinter.class.getName() + ";",
-            "set test.data.dir=" + testDataDirectory + ";",
-            "set test.script.dir=" + testScriptDirectory + ";",
-            "!run " + script,
+          "set hive.exec.pre.hooks=" + PreExecutePrinter.class.getName() + ";",
+          "set test.data.dir=" + testDataDirectory + ";",
+          "set test.script.dir=" + testScriptDirectory + ";",
           },
           log,
           Converter.NONE);
-    } catch (Exception e) {
-      throw new SQLException("Error running infra script: " + script
-          + "\nCheck the following logs for details:\n - " + beeLineOutput + "\n - " + log, e);
-    }
+    beeLineClient.execute(commands, log, Converter.NONE);
   }

   @Override
@@ -193,7 +232,10 @@ public void runTest(QFile qFile) throws Exception {
   }

   @Override
-  public void setUp() {
+  public void setUp() throws IOException, SQLException {
+    if (!parallel) {
+      runCleanup(new File(logDirectory, "drop.beeline"), new File(logDirectory, "drop.raw"));
+    }
   }

   @Override
diff --git itests/util/src/main/java/org/apache/hive/beeline/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java
index e70ac38..ed67c97 100644
--- itests/util/src/main/java/org/apache/hive/beeline/QFile.java
+++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java
@@ -78,7 +78,7 @@
   private File afterExecuteLogFile;
   private static RegexFilterSet staticFilterSet = getStaticFilterSet();
   private RegexFilterSet specificFilterSet;
-  private boolean rewriteSourceTables;
+  private boolean testSpecificDatabase;
   private Converter converter;

   private QFile() {}
@@ -123,6 +123,10 @@ public Converter getConverter() {
     return converter;
   }

+  public boolean isTestSpecificDatabase() {
+    return testSpecificDatabase;
+  }
+
   public String getDebugHint() {
     return String.format(DEBUG_HINT, inputFile, rawOutputFile, outputFile, expectedOutputFile,
         logFile, beforeExecuteLogFile, afterExecuteLogFile,
@@ -136,7 +140,7 @@ public String getDebugHint() {
    * @throws IOException File read error
    */
   public String[] filterCommands(String[] commands) throws IOException {
-    if (rewriteSourceTables) {
+    if (testSpecificDatabase) {
       for (int i = 0; i < commands.length; i++) {
[...]
+  public Set<String> getDatabases() throws SQLException {
+    Set<String> databases = new HashSet<String>();
+
+    DatabaseMetaData metaData = beeLine.getDatabaseMetaData();
+    // Get the databases
+    try (ResultSet schemasResultSet = metaData.getSchemas()) {
+      while (schemasResultSet.next()) {
+        databases.add(schemasResultSet.getString("TABLE_SCHEM"));
+      }
+    }
+    return databases;
+  }
+
+  public Set<String> getTables() throws SQLException {
+    Set<String> tables = new HashSet<String>();
+
+    DatabaseMetaData metaData = beeLine.getDatabaseMetaData();
+    // Get the tables in the default database
+    String[] types = new String[] {"TABLE"};
+    try (ResultSet tablesResultSet = metaData.getTables(null, "default", "%", types)) {
+      while (tablesResultSet.next()) {
+        tables.add(tablesResultSet.getString("TABLE_NAME"));
+      }
+    }
+    return tables;
+  }
+
+  public Set<String> getViews() throws SQLException {
+    Set<String> views = new HashSet<String>();
+
+    DatabaseMetaData metaData = beeLine.getDatabaseMetaData();
+    // Get the views in the default database
+    String[] types = new String[] {"VIEW"};
+    try (ResultSet tablesResultSet = metaData.getTables(null, "default", "%", types)) {
+      while (tablesResultSet.next()) {
+        views.add(tablesResultSet.getString("TABLE_NAME"));
+      }
+    }
+    return views;
+  }
+
   public void execute(String[] commands, File resultFile, Converter converter)
       throws Exception {
     beeLine.runCommands(
@@ -69,39 +135,32 @@ public void execute(String[] commands, File resultFile, Converter converter)
   }

   private void beforeExecute(QFile qFile) throws Exception {
-    execute(
-        new String[] {
-          "!set outputformat tsv2",
-          "!set verbose false",
-          "!set silent true",
-          "!set showheader false",
-          "USE default;",
-          "SHOW TABLES;",
-          "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
-          "CREATE DATABASE `" + qFile.getDatabaseName() + "`;",
-          "USE `" + qFile.getDatabaseName() + "`;",
-          "set hive.testing.short.logs=true;",
-          "set hive.testing.remove.logs=false;",
-        },
-        qFile.getBeforeExecuteLogFile(),
-        Converter.NONE);
+    String[] commands = TEST_FIRST_COMMANDS;
+
+    if (qFile.isTestSpecificDatabase()) {
+      String[] extraCommands = new String[] {
+        "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
+        "CREATE DATABASE `" + qFile.getDatabaseName() + "`;",
+        "USE `" + qFile.getDatabaseName() + "`;"
+      };
+      commands = ArrayUtils.addAll(commands, extraCommands);
+    }
+    commands = ArrayUtils.addAll(commands, TEST_SET_LOG_COMMANDS);
+    execute(commands, qFile.getBeforeExecuteLogFile(), Converter.NONE);
     beeLine.setIsTestMode(true);
   }

   private void afterExecute(QFile qFile) throws Exception {
     beeLine.setIsTestMode(false);
-    execute(
-        new String[] {
-          "set hive.testing.short.logs=false;",
-          "!set verbose true",
-          "!set silent false",
-          "!set showheader true",
-          "!set outputformat table",
-          "USE default;",
-          "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
-        },
-        qFile.getAfterExecuteLogFile(),
-        Converter.NONE);
+    String[] commands = TEST_RESET_COMMANDS;
+
+    if (qFile.isTestSpecificDatabase()) {
+      String[] extraCommands = new String[] {
+        "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;"
+      };
+      commands = ArrayUtils.addAll(commands, extraCommands);
+    }
+    execute(commands, qFile.getAfterExecuteLogFile(), Converter.NONE);
   }

   public void execute(QFile qFile) throws Exception {