diff --git data/scripts/q_test_init.sql data/scripts/q_test_init.sql index df0582814a..e69de29bb2 100644 --- data/scripts/q_test_init.sql +++ data/scripts/q_test_init.sql @@ -1,10 +0,0 @@ -set hive.stats.dbclass=fs; - --- --- Function qtest_get_java_boolean --- -DROP FUNCTION IF EXISTS qtest_get_java_boolean; -CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'; - -reset; -set hive.stats.dbclass=fs; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java index eb3b935f09..383e35e401 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java @@ -122,6 +122,7 @@ public void testAlterTablePartitionLocation_alter5() throws Exception { for (int i = 0; i < qfiles.length; i++) { qt[i] = new CheckResults(resDir, logDir, MiniClusterType.NONE, "parta"); + qt[i].postInit(); qt[i].newSession(); qt[i].addFile(qfiles[i], false); qt[i].clearTestSideEffects(); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java index 3e0cdac67c..d72c14a6c1 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java @@ -42,6 +42,7 @@ public void testMTQueries1() throws Exception { QTestUtil[] qts = QTestRunnerUtils.queryListRunnerSetup(qfiles, resDir, logDir, "q_test_init_src_with_stats.sql", "q_test_cleanup_src_with_stats.sql"); for (QTestUtil util : qts) { + util.postInit(); // derby fails creating multiple stats aggregator concurrently util.getConf().setBoolean("hive.exec.submitviachild", true); util.getConf().setBoolean("hive.exec.submit.local.task.via.child", true); diff --git 
itests/qtest/pom.xml itests/qtest/pom.xml index 109f7a10f0..be8e377891 100644 --- itests/qtest/pom.xml +++ itests/qtest/pom.xml @@ -401,6 +401,20 @@ org.slf4j slf4j-api + + + org.mariadb.jdbc + mariadb-java-client + ${mariadb.version} + test + + + org.postgresql + postgresql + 9.3-1102-jdbc41 + test + @@ -436,6 +450,24 @@ org.apache.maven.plugins maven-antrun-plugin + + setup-metastore-scripts + process-test-resources + + run + + + + + + + + + + + + + generate-tests-sources generate-test-sources @@ -458,10 +490,6 @@ - - - - @@ -488,6 +516,17 @@ + + org.apache.maven.plugins + maven-surefire-plugin + + + ${test.conf.dir} + ${basedir}/${hive.path.to.root}/conf + ${itest.jdbc.jars} + + + diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java index 50417e9378..3c0ba149fb 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java @@ -78,9 +78,6 @@ public void beforeClass() { // do a one time initialization setupUniqueTestPath(); - qt.newSession(); - qt.cleanUp(); - qt.createSources(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); @@ -135,6 +132,11 @@ public void shutdown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + private static String debugHint = "\nSee ./itests/hive-blobstore/target/tmp/log/hive.log, " + "or check ./itests/hive-blobstore/target/surefire-reports/ for specific test cases logs."; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java index 574a67f2e3..fcfc79059a 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java +++ 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java @@ -22,9 +22,13 @@ import java.util.List; import java.util.Set; +import org.apache.hadoop.hive.ql.QTestMetaStoreHandler; +import org.apache.hadoop.hive.ql.QTestUtil; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class adapts old vm test-executors to be executed in multiple instances @@ -32,9 +36,13 @@ public abstract class CliAdapter { protected final AbstractCliConfig cliConfig; + protected QTestMetaStoreHandler metaStoreHandler; + boolean firstTestNotYetRun = true; // this can protect class/test level logic from each other + private static final Logger LOG = LoggerFactory.getLogger(CliAdapter.class); public CliAdapter(AbstractCliConfig cliConfig) { this.cliConfig = cliConfig; + metaStoreHandler = new QTestMetaStoreHandler(); } public final List getParameters() throws Exception { @@ -68,11 +76,29 @@ public Statement apply(final Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { - CliAdapter.this.beforeClass(); + metaStoreHandler.setSystemProperties(); // for QTestUtil pre-initialization + CliAdapter.this.beforeClass(); // instantiating QTestUtil + + LOG.debug("will initialize metastore database in class rule"); + metaStoreHandler.getRule().before(); + metaStoreHandler.getRule().install(); + + if (getQt() != null) { + metaStoreHandler.setMetaStoreConfiguration(getQt().getConf()); + getQt().postInit(); + getQt().newSession(); + getQt().createSources(); + } + + CliAdapter.this.beforeClassSpec(); try { base.evaluate(); } finally { CliAdapter.this.shutdown(); + if (getQt() != null && firstTestNotYetRun) { + LOG.debug("will destroy metastore database in class rule (if not derby)"); + metaStoreHandler.afterTest(getQt()); + } } } }; @@ -80,6 +106,10 @@ public void evaluate() throws Throwable { 
}; } + // override this if e.g. a metastore dependent init logic is needed + protected void beforeClassSpec() { + } + public final TestRule buildTestRule() { return new TestRule() { @Override @@ -87,11 +117,28 @@ public Statement apply(final Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { + + if (getQt() != null && !firstTestNotYetRun) { + LOG.debug("will initialize metastore database in test rule"); + metaStoreHandler.setMetaStoreConfiguration(getQt().getConf()); + metaStoreHandler.beforeTest(); + } + firstTestNotYetRun = false; + + if (getQt() != null && CliAdapter.this.shouldRunCreateScriptBeforeEveryTest()){ + // it's because some drivers still use init scripts, which can create a non-dataset table + // and get cleant after every test + getQt().createSources(); + } CliAdapter.this.setUp(); try { base.evaluate(); } finally { CliAdapter.this.tearDown(); + if (getQt() != null) { + LOG.debug("will destroy metastore database in test rule (if not derby)"); + metaStoreHandler.afterTest(getQt()); + } } } }; @@ -99,6 +146,12 @@ public void evaluate() throws Throwable { }; } + protected boolean shouldRunCreateScriptBeforeEveryTest() { + return false; + } + + protected abstract QTestUtil getQt(); + // HIVE-14444: pending refactor to push File forward public final void runTest(String name, File qfile) throws Exception { runTest(name, qfile.getName(), qfile.getAbsolutePath()); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java index 9a23ef855e..e6c5e701b6 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.accumulo.AccumuloQTestUtil; import 
org.apache.hadoop.hive.accumulo.AccumuloTestSetup; import org.apache.hadoop.hive.ql.QTestProcessExecResult; +import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType; import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.junit.After; @@ -50,12 +51,6 @@ public void beforeClass() { try { qt = new AccumuloQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, new AccumuloTestSetup(), initScript, cleanupScript); - - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); - } catch (Exception e) { throw new RuntimeException("Unexpected exception in setUp", e); } @@ -66,7 +61,6 @@ public void beforeClass() { public void shutdown() { try { qt.shutdown(); - } catch (Exception e) { throw new RuntimeException("Unexpected exception in tearDown", e); } @@ -77,7 +71,6 @@ public void shutdown() { public void setUp() { try { qt.newSession(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -92,7 +85,6 @@ public void tearDown() { try { qt.clearPostTestEffects(); qt.clearTestSideEffects(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -101,6 +93,11 @@ public void tearDown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index c8239a731c..cd6bc34cc1 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.conf.HiveConfUtil; import org.apache.hadoop.hive.ql.QTestProcessExecResult; +import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.dataset.Dataset; import org.apache.hadoop.hive.ql.dataset.DatasetCollection; import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler; @@ -202,6 +203,11 @@ public void shutdown() throws Exception { } } + @Override + protected QTestUtil getQt() { + return null; + } + private void runTest(QFile qFile, List> preCommands) throws Exception { try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(qFile.getLogFile())) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java index d06acfb978..7a903099ff 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java @@ -57,7 +57,7 @@ public void beforeClass() { LOG.info(message); System.err.println(message); - MiniClusterType miniMR =cliConfig.getClusterType(); + MiniClusterType miniMR = cliConfig.getClusterType(); String hiveConfDir = cliConfig.getHiveConfDir(); String initScript = cliConfig.getInitScript(); String cleanupScript = cliConfig.getCleanupScript(); @@ -79,25 +79,6 @@ public QTestUtil invokeInternal() throws Exception { .build()); } }.invoke("QtestUtil instance created", LOG, true); - - // do a one time initialization - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.newSession(); - qt.cleanUp(); // I don't think this is neccessary... 
- return null; - } - }.invoke("Initialization cleanup done.", LOG, true); - - new ElapsedTimeLoggingWrapper() { - @Override - public Void invokeInternal() throws Exception { - qt.createSources(); - return null; - } - }.invoke("Initialization createSources done.", LOG, true); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -167,6 +148,11 @@ public Void invokeInternal() throws Exception { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String testName, String fname, String fpath) { Stopwatch sw = Stopwatch.createStarted(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java index 62ea96089a..9a519ff2a7 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java @@ -64,12 +64,6 @@ public void beforeClass() { .withCleanupScript(cleanupScript) .withLlapIo(false) .build()); - - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -119,6 +113,11 @@ public void shutdown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { final String queryDirectory = cliConfig.getQueryDirectory(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java index 301b91e54e..9684565f11 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java @@ -22,6 +22,7 @@ import java.util.List; import 
org.apache.hadoop.hive.ql.QFileVersionHandler; +import org.apache.hadoop.hive.ql.QTestUtil; public class CoreDummy extends CliAdapter { QFileVersionHandler qvh = new QFileVersionHandler(); @@ -46,6 +47,11 @@ public void tearDown() { public void shutdown() { } + @Override + protected QTestUtil getQt() { + return null; + } + @Override public void runTest(String name, String name2, String absolutePath) { List versionFiles = qvh.getVersionFiles(cliConfig.getQueryDirectory(), name); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java index 40545d8d65..5cad1e278f 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.hbase.HBaseQTestUtil; import org.apache.hadoop.hive.hbase.HBaseTestSetup; import org.apache.hadoop.hive.ql.QTestProcessExecResult; +import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType; import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.junit.After; @@ -50,11 +51,6 @@ public void beforeClass() { try { qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, new HBaseTestSetup(), initScript, cleanupScript); - - qt.newSession(); - qt.cleanUp(null); - qt.createSources(null); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -102,6 +98,11 @@ public void shutdown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java index 6094e6dffb..62672e6933 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.hbase.HBaseQTestUtil; import org.apache.hadoop.hive.hbase.HBaseTestSetup; import org.apache.hadoop.hive.ql.QTestProcessExecResult; +import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType; import org.junit.After; @@ -98,6 +99,11 @@ public void shutdown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java index 71134e7b0a..ab9c36fa69 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java @@ -57,11 +57,6 @@ public void beforeClass() { .withLlapIo(true) .withQTestSetup(new KuduTestSetup()) .build()); - - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); } catch (Exception e) { throw new RuntimeException("Unexpected exception in setUp", e); } @@ -106,6 +101,11 @@ public void tearDown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java index 4f6988c9f3..0fa3ac700c 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java @@ -57,11 +57,6 @@ public void beforeClass() { .withLlapIo(true) .withQTestSetup(new KuduTestSetup()) .build()); - - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); } catch (Exception e) { throw new RuntimeException("Unexpected exception in setUp", e); } @@ -106,6 +101,11 @@ public void tearDown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java index bb9e65524d..2536c1dd83 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java @@ -59,11 +59,6 @@ public void beforeClass(){ .withCleanupScript(cleanupScript) .withLlapIo(false) .build()); - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -114,6 +109,11 @@ public void shutdown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) throws Exception { long startTime = System.currentTimeMillis(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java index 
59c71f544c..06d1e16442 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java @@ -32,6 +32,8 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.junit.After; import org.junit.AfterClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Strings; @@ -45,6 +47,7 @@ */ public class CorePerfCliDriver extends CliAdapter { + private static final Logger LOG = LoggerFactory.getLogger(CorePerfCliDriver.class); private static QTestUtil qt; public CorePerfCliDriver(AbstractCliConfig testCliConfig) { @@ -66,16 +69,6 @@ public void beforeClass() { .withOutDir(cliConfig.getResultsDir()).withLogDir(cliConfig.getLogDir()) .withClusterType(miniMR).withConfDir(hiveConfDir).withInitScript(initScript) .withCleanupScript(cleanupScript).withLlapIo(false).build()); - - // do a one time initialization - qt.newSession(); - qt.cleanUp(); - qt.createSources(); - // Manually modify the underlying metastore db to reflect statistics corresponding to - // the 30TB TPCDS scale set. This way the optimizer will generate plans for a 30 TB set. - MetaStoreDumpUtility.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf(), - QTestSystemProperties.getTempDir()); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -85,6 +78,18 @@ public void beforeClass() { } } + @Override + protected void beforeClassSpec() { + overrideStatsInMetastore(); + } + + private void overrideStatsInMetastore() { + // Manually modify the underlying metastore db to reflect statistics corresponding to + // the 30TB TPCDS scale set. This way the optimizer will generate plans for a 30 TB set. 
+ MetaStoreDumpUtility.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf(), + QTestSystemProperties.getTempDir()); + } + @Override @AfterClass public void shutdown() throws Exception { @@ -95,7 +100,6 @@ public void shutdown() throws Exception { public void setUp() { try { qt.newSession(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -109,7 +113,6 @@ public void setUp() { public void tearDown() { try { qt.clearPostTestEffects(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -118,10 +121,16 @@ public void tearDown() { } } + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String name, String fname, String fpath) { long startTime = System.currentTimeMillis(); try { + LOG.info("Begin query: " + fname); System.err.println("Begin query: " + fname); qt.addFile(fpath); @@ -144,7 +153,9 @@ public void runTest(String name, String fname, String fpath) { } long elapsedTime = System.currentTimeMillis() - startTime; - System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s"); + String message = "Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s"; + LOG.info(message); + System.err.println(message); assertTrue("Test passed", true); } diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java new file mode 100644 index 0000000000..b86d736a89 --- /dev/null +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Derby; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Mssql; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Mysql; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Oracle; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Postgres; +import org.apache.hadoop.hive.metastore.txn.TxnDbUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * QTestMetaStoreHandler is responsible for wrapping the logic of handling different metastore + * databases in qtests. + */ +public class QTestMetaStoreHandler { + private static final Logger LOG = LoggerFactory.getLogger(QTestMetaStoreHandler.class); + + private String metastoreType; + private DatabaseRule rule; + + public QTestMetaStoreHandler() { + this.metastoreType = QTestSystemProperties.getMetaStoreDb() == null ? 
"derby" + : QTestSystemProperties.getMetaStoreDb(); + + this.rule = getDatabaseRule(metastoreType).setVerbose(false); + + LOG.info(String.format("initialized metastore type '%s' for qtests", metastoreType)); + } + + public DatabaseRule getRule() { + return rule; + } + + public boolean isDerby() { + return "derby".equalsIgnoreCase(metastoreType); + } + + public QTestMetaStoreHandler setMetaStoreConfiguration(HiveConf conf) { + conf.setVar(ConfVars.METASTOREDBTYPE, getDbTypeConfString()); + + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY, rule.getJdbcUrl()); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECTION_DRIVER, rule.getJdbcDriver()); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECTION_USER_NAME, rule.getHiveUser()); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.PWD, rule.getHivePassword()); + + LOG.info(String.format("set metastore connection to url: %s", + MetastoreConf.getVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY))); + + return this; + } + + private DatabaseRule getDatabaseRule(String metastoreType) { + switch (metastoreType) { + case "postgres": + return new Postgres(); + case "oracle": + return new Oracle(); + case "mysql": + return new Mysql(); + case "mssql": + case "sqlserver": + return new Mssql(); + default: + return new Derby(); + } + } + + private String getDbTypeConfString() {// "ORACLE", "MYSQL", "MSSQL", "POSTGRES" + return "sqlserver".equalsIgnoreCase(metastoreType) ? 
"MSSQL" : metastoreType.toUpperCase(); + } + + public void beforeTest() throws Exception { + getRule().before(); + if (!isDerby()) {// derby is handled with old QTestUtil logic (TxnDbUtil stuff) + getRule().install(); + } + } + + public void afterTest(QTestUtil qt) throws Exception { + getRule().after(); + + // special qtest logic, which doesn't fit quite well into Derby.after() + if (isDerby()) { + TxnDbUtil.cleanDb(qt.getConf()); + TxnDbUtil.prepDb(qt.getConf()); + } + } + + public void setSystemProperties() { + System.setProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY.getVarname(), rule.getJdbcUrl()); + System.setProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER.getVarname(), rule.getJdbcDriver()); + System.setProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME.getVarname(), rule.getHiveUser()); + System.setProperty(MetastoreConf.ConfVars.PWD.getVarname(), rule.getHivePassword()); + } +} diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java index f82d17e5b3..89b9c01b8f 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java @@ -28,6 +28,7 @@ private static final String SYS_PROP_VECTORIZATION_ENABLED = "test.vectorization.enabled"; private static final String SYS_PROP_CHECK_SYNTAX = "test.check.syntax"; private static final String SYS_PROP_FORCE_EXCLUSIONS = "test.force.exclusions"; + private static final String SYS_PROP_METASTORE_DB = "test.metastore.db"; private static final String SYS_PROP_BUILD_DIR = "build.dir"; // typically target public static String getTempDir() { @@ -50,6 +51,11 @@ public static String getBuildDir() { return System.getProperty(SYS_PROP_BUILD_DIR); } + public static String getMetaStoreDb() { + return System.getProperty(SYS_PROP_METASTORE_DB) == null ? 
null + : System.getProperty(SYS_PROP_METASTORE_DB).toLowerCase(); + } + public static boolean isVectorizationEnabled() { return isTrue(SYS_PROP_VECTORIZATION_ENABLED); } diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index a105c05849..51d36720bf 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -52,7 +52,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.txn.TxnDbUtil; import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType; import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache; import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler; @@ -198,9 +197,8 @@ public QTestUtil(QTestArguments testArgs) throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } - QueryState queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build(); - conf = queryState.getConf(); - sem = new SemanticAnalyzer(queryState); + conf = new HiveConf(IDriver.class); + setMetaStoreProperties(); this.miniClusters.setup(testArgs, conf, getScriptsDir(), logDir); @@ -219,11 +217,23 @@ public QTestUtil(QTestArguments testArgs) throws Exception { this.initScript = scriptsDir + File.separator + testArgs.getInitScript(); this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript(); - postInit(); savedConf = new HiveConf(conf); } + private void setMetaStoreProperties() { + setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY); + setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER); + setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME); + 
setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.PWD); + } + + private void setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars var) { + if (System.getProperty(var.getVarname()) != null) { + MetastoreConf.setVar(conf, var, System.getProperty(var.getVarname())); + } + } + private String getScriptsDir() { // Use the current directory if it is not specified String scriptsDir = conf.get("test.data.scripts"); @@ -418,8 +428,6 @@ public void clearTestSideEffects() throws Exception { clearUDFsCreatedDuringTests(); clearKeysCreatedInTests(); StatsSources.clearGlobalStats(); - TxnDbUtil.cleanDb(conf); - TxnDbUtil.prepDb(conf); dispatcher.afterTest(this); } @@ -441,11 +449,13 @@ public void cleanUp(String fileName) throws Exception { } conf.setBoolean("hive.test.shutdown.phase", true); - clearTablesCreatedDuringTests(); - clearUDFsCreatedDuringTests(); clearKeysCreatedInTests(); - cleanupFromFile(); + String metastoreDb = QTestSystemProperties.getMetaStoreDb(); + if (metastoreDb == null || "derby".equalsIgnoreCase(metastoreDb)) { + // otherwise, the docker container is already destroyed by this time + cleanupFromFile(); + } // delete any contents in the warehouse dir Path p = new Path(testWarehouse); @@ -520,9 +530,11 @@ private void initFromScript() throws IOException { } } - private void postInit() throws Exception { + public void postInit() throws Exception { miniClusters.postInit(conf); + sem = new SemanticAnalyzer(new QueryState.Builder().withHiveConf(conf).build()); + testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE); db = Hive.get(conf); diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java index 9a136e24f0..ab00bfa6e4 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java @@ -63,11 
+63,6 @@ public void beforeClass() { .withCleanupScript(cleanupScript) .withLlapIo(false) .build()); - - qt.newSession(); - qt.cleanUp(); - qt.createSources(); - } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -102,6 +97,15 @@ public void shutdown() { } } + protected boolean shouldRunCreateScriptBeforeEveryTest() { + return true; + } + + @Override + protected QTestUtil getQt() { + return qt; + } + @Override public void runTest(String tname, String fname, String fpath) throws Exception { long startTime = System.currentTimeMillis(); diff --git pom.xml pom.xml index 068d048e94..7ac071d8cd 100644 --- pom.xml +++ pom.xml @@ -187,6 +187,7 @@ 0.9.3 0.9.3-1 2.12.1 + 2.5.0 2.3 1.5.6 1.10.19 diff --git ql/src/test/queries/clientpositive/create_func1.q ql/src/test/queries/clientpositive/create_func1.q index 2c6acfc291..1f3b3fff0c 100644 --- ql/src/test/queries/clientpositive/create_func1.q +++ ql/src/test/queries/clientpositive/create_func1.q @@ -1,6 +1,6 @@ --! 
qt:dataset:src --- qtest_get_java_boolean should already be created during test initialization +CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'; select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src limit 1; describe function extended qtest_get_java_boolean; @@ -46,6 +46,7 @@ use mydb; -- unqualified function should resolve to one in mydb db select qtest_get_java_boolean('abc'), default.qtest_get_java_boolean('abc'), mydb.qtest_get_java_boolean('abc') from default.src limit 1; +drop function default.qtest_get_java_boolean; drop function mydb.qtest_get_java_boolean; drop database mydb cascade; diff --git ql/src/test/queries/clientpositive/partition_params_postgres.q ql/src/test/queries/clientpositive/partition_params_postgres.q new file mode 100644 index 0000000000..e2ad5302c7 --- /dev/null +++ ql/src/test/queries/clientpositive/partition_params_postgres.q @@ -0,0 +1,5 @@ +drop table if exists my_table; +create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES ("serialization.format" = "1"); +insert into my_table VALUES(11, 201, "F"); +SELECT * from my_table; +describe formatted my_table; diff --git ql/src/test/results/clientpositive/create_func1.q.out ql/src/test/results/clientpositive/create_func1.q.out index 238d378cda..377b91ab54 100644 --- ql/src/test/results/clientpositive/create_func1.q.out +++ ql/src/test/results/clientpositive/create_func1.q.out @@ -1,3 +1,11 @@ +PREHOOK: query: CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean' +PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default +PREHOOK: Output: default.qtest_get_java_boolean +POSTHOOK: query: CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean' +POSTHOOK: type: CREATEFUNCTION +POSTHOOK: Output: database:default +POSTHOOK: Output: 
default.qtest_get_java_boolean PREHOOK: query: select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -206,6 +214,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### ABC NULL ABC +PREHOOK: query: drop function default.qtest_get_java_boolean +PREHOOK: type: DROPFUNCTION +PREHOOK: Output: database:default +PREHOOK: Output: default.qtest_get_java_boolean +POSTHOOK: query: drop function default.qtest_get_java_boolean +POSTHOOK: type: DROPFUNCTION +POSTHOOK: Output: database:default +POSTHOOK: Output: default.qtest_get_java_boolean PREHOOK: query: drop function mydb.qtest_get_java_boolean PREHOOK: type: DROPFUNCTION PREHOOK: Output: database:mydb diff --git ql/src/test/results/clientpositive/llap/sysdb.q.out ql/src/test/results/clientpositive/llap/sysdb.q.out index 798b190d40..92f28d6f60 100644 --- ql/src/test/results/clientpositive/llap/sysdb.q.out +++ ql/src/test/results/clientpositive/llap/sysdb.q.out @@ -4911,7 +4911,6 @@ POSTHOOK: query: select func_name, func_type from funcs order by func_name, func POSTHOOK: type: QUERY POSTHOOK: Input: sys@funcs #### A masked pattern was here #### -qtest_get_java_boolean 1 PREHOOK: query: select constraint_name from key_constraints order by constraint_name limit 5 PREHOOK: type: QUERY PREHOOK: Input: sys@key_constraints diff --git ql/src/test/results/clientpositive/partition_params_postgres.q.out ql/src/test/results/clientpositive/partition_params_postgres.q.out new file mode 100644 index 0000000000..f536fd24ff --- /dev/null +++ ql/src/test/results/clientpositive/partition_params_postgres.q.out @@ -0,0 +1,76 @@ +PREHOOK: query: drop table if exists my_table +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists my_table +POSTHOOK: type: DROPTABLE +PREHOOK: query: create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES 
("serialization.format" = "1") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@my_table +POSTHOOK: query: create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES ("serialization.format" = "1") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@my_table +PREHOOK: query: insert into my_table VALUES(11, 201, "F") +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@my_table +POSTHOOK: query: insert into my_table VALUES(11, 201, "F") +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@my_table@col2=F +POSTHOOK: Lineage: my_table PARTITION(col2=F).col1 SCRIPT [] +POSTHOOK: Lineage: my_table PARTITION(col2=F).col3 SCRIPT [] +PREHOOK: query: SELECT * from my_table +PREHOOK: type: QUERY +PREHOOK: Input: default@my_table +PREHOOK: Input: default@my_table@col2=F +#### A masked pattern was here #### +POSTHOOK: query: SELECT * from my_table +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my_table +POSTHOOK: Input: default@my_table@col2=F +#### A masked pattern was here #### +11 201 F +PREHOOK: query: describe formatted my_table +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@my_table +POSTHOOK: query: describe formatted my_table +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@my_table +# col_name data_type comment +col1 int +col3 int + +# Partition Information +# col_name data_type comment +col2 string + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: EXTERNAL_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + EXTERNAL TRUE + bucketing_version 2 + discover.partitions true + numFiles 1 + numPartitions 1 + numRows 1 + rawDataSize 6 + serialization.format 1 + totalSize 7 +#### A masked pattern was here #### + +# Storage 
Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 diff --git ql/src/test/results/clientpositive/show_functions.q.out ql/src/test/results/clientpositive/show_functions.q.out index 9db684579b..0453400e73 100644 --- ql/src/test/results/clientpositive/show_functions.q.out +++ ql/src/test/results/clientpositive/show_functions.q.out @@ -102,7 +102,6 @@ day dayofmonth dayofweek decode -default.qtest_get_java_boolean degrees dense_rank div @@ -538,7 +537,6 @@ day dayofmonth dayofweek decode -default.qtest_get_java_boolean degrees dense_rank div diff --git standalone-metastore/DEV-README standalone-metastore/DEV-README index 9c261171fb..ab5df26590 100644 --- standalone-metastore/DEV-README +++ standalone-metastore/DEV-README @@ -45,6 +45,12 @@ To run just one test, do mvn verify -DskipITests=false -Dit.test=ITestMysql -Dtest=nosuch +Supported databases for testing: +-Dit.test=ITestMysql +-Dit.test=ITestOracle +-Dit.test=ITestPostgres +-Dit.test=ITestSqlServer + You can download the Oracle driver at http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html You should download Oracle 11g Release 1, ojdbc6.jar diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java index 49e19adf71..d27323ac55 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java @@ -142,10 +142,10 @@ public String generateInitFileName(String toVersion) throws 
HiveMetaException { @Override public String getCreateUserScript() throws HiveMetaException { String createScript = CREATE_USER_PREFIX + "." + dbType + SQL_FILE_EXTENSION; + File scriptFile = new File(getMetaStoreScriptDir() + File.separatorChar + createScript); // check if the file exists - if (!(new File(getMetaStoreScriptDir() + File.separatorChar + - createScript).exists())) { - throw new HiveMetaException("Unable to find create user file, expected: " + createScript); + if (!scriptFile.exists()) { + throw new HiveMetaException("Unable to find create user file, expected: " + scriptFile.getAbsolutePath()); } return createScript; } diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java index 27b0483c01..b58b0f0d89 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java @@ -196,8 +196,9 @@ protected boolean isVerbose() { return verbose; } - protected void setVerbose(boolean verbose) { + public MetastoreSchemaTool setVerbose(boolean verbose) { this.verbose = verbose; + return this; } protected void setDbOpts(String dbOpts) { @@ -302,7 +303,9 @@ protected void execSql(String scriptDir, String scriptFile) throws IOException, // Generate the beeline args per hive conf and execute the given script protected void execSql(String sqlScriptFile) throws IOException { - CommandBuilder builder = new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile); + CommandBuilder builder = + new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile) + .setVerbose(verbose); // run the script using SqlLine SqlLine sqlLine = new SqlLine(); @@ 
-351,6 +354,7 @@ protected String quote(String stmt) { protected final String sqlScriptFile; protected final String driver; protected final String url; + private boolean verbose = false; protected CommandBuilder(Configuration conf, String url, String driver, String userName, String password, String sqlScriptFile) throws IOException { @@ -363,12 +367,19 @@ protected CommandBuilder(Configuration conf, String url, String driver, String u this.sqlScriptFile = sqlScriptFile; } + public CommandBuilder setVerbose(boolean verbose) { + this.verbose = verbose; + return this; + } + public String[] buildToRun() throws IOException { return argsWith(password); } public String buildToLog() throws IOException { - logScript(); + if (verbose) { + logScript(); + } return StringUtils.join(argsWith(PASSWD_MASK), " "); } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java index 6d8fd46f2a..c1a1629548 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java @@ -17,249 +17,30 @@ */ package org.apache.hadoop.hive.metastore.dbinstall; -import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.metastore.HiveMetaException; -import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool; -import org.junit.After; +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; import org.junit.Assert; -import org.junit.Before; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import 
java.util.concurrent.TimeUnit; public abstract class DbInstallBase { - - private static final Logger LOG = LoggerFactory.getLogger(DbInstallBase.class); - - private static final String HIVE_USER = "hiveuser"; - protected static final String HIVE_DB = "hivedb"; private static final String FIRST_VERSION = "1.2.0"; - private static final int MAX_STARTUP_WAIT = 5 * 60 * 1000; - - protected abstract String getDockerContainerName(); - protected abstract String getDockerImageName(); - protected abstract String[] getDockerAdditionalArgs(); - protected abstract String getDbType(); - protected abstract String getDbRootUser(); - protected abstract String getDbRootPassword(); - protected abstract String getJdbcDriver(); - protected abstract String getJdbcUrl(); - /** - * URL to use when connecting as root rather than Hive - * @return URL - */ - protected abstract String getInitialJdbcUrl(); - - /** - * Determine if the docker container is ready to use. - * @param logOutput output of docker logs command - * @return true if ready, false otherwise - */ - protected abstract boolean isContainerReady(String logOutput); - protected abstract String getHivePassword(); - - @Before - public void runDockerContainer() throws IOException, InterruptedException { - if (runCmdAndPrintStreams(buildRunCmd(), 600) != 0) { - throw new RuntimeException("Unable to start docker container"); - } - long startTime = System.currentTimeMillis(); - ProcessResults pr; - do { - Thread.sleep(5000); - pr = runCmd(buildLogCmd(), 5); - if (pr.rc != 0) throw new RuntimeException("Failed to get docker logs"); - } while (startTime + MAX_STARTUP_WAIT >= System.currentTimeMillis() && !isContainerReady(pr.stdout)); - if (startTime + MAX_STARTUP_WAIT < System.currentTimeMillis()) { - throw new RuntimeException("Container failed to be ready in " + MAX_STARTUP_WAIT/1000 + - " seconds"); - } - MetastoreSchemaTool.setHomeDirForTesting(); - } - - @After - public void stopAndRmDockerContainer() throws IOException, 
InterruptedException { - if ("true".equalsIgnoreCase(System.getProperty("metastore.itest.no.stop.container"))) { - LOG.warn("Not stopping container " + getDockerContainerName() + " at user request, please " + - "be sure to shut it down before rerunning the test."); - return; - } - if (runCmdAndPrintStreams(buildStopCmd(), 60) != 0) { - throw new RuntimeException("Unable to stop docker container"); - } - if (runCmdAndPrintStreams(buildRmCmd(), 15) != 0) { - throw new RuntimeException("Unable to remove docker container"); - } - } - - private static class ProcessResults { - final String stdout; - final String stderr; - final int rc; - - public ProcessResults(String stdout, String stderr, int rc) { - this.stdout = stdout; - this.stderr = stderr; - this.rc = rc; - } - } - - private ProcessResults runCmd(String[] cmd, long secondsToWait) throws IOException, - InterruptedException { - LOG.info("Going to run: " + StringUtils.join(cmd, " ")); - Process proc = Runtime.getRuntime().exec(cmd); - if (!proc.waitFor(secondsToWait, TimeUnit.SECONDS)) { - throw new RuntimeException("Process " + cmd[0] + " failed to run in " + secondsToWait + - " seconds"); - } - BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream())); - final StringBuilder lines = new StringBuilder(); - reader.lines() - .forEach(s -> lines.append(s).append('\n')); - - reader = new BufferedReader(new InputStreamReader(proc.getErrorStream())); - final StringBuilder errLines = new StringBuilder(); - reader.lines() - .forEach(s -> errLines.append(s).append('\n')); - return new ProcessResults(lines.toString(), errLines.toString(), proc.exitValue()); - } - - private int runCmdAndPrintStreams(String[] cmd, long secondsToWait) - throws InterruptedException, IOException { - ProcessResults results = runCmd(cmd, secondsToWait); - LOG.info("Stdout from proc: " + results.stdout); - LOG.info("Stderr from proc: " + results.stderr); - return results.rc; - } - - private int createUser() { - return 
new MetastoreSchemaTool().run(buildArray( - "-createUser", - "-dbType", - getDbType(), - "-userName", - getDbRootUser(), - "-passWord", - getDbRootPassword(), - "-hiveUser", - HIVE_USER, - "-hivePassword", - getHivePassword(), - "-hiveDb", - HIVE_DB, - "-url", - getInitialJdbcUrl(), - "-driver", - getJdbcDriver() - )); - } - - private int installLatest() { - return new MetastoreSchemaTool().run(buildArray( - "-initSchema", - "-dbType", - getDbType(), - "-userName", - HIVE_USER, - "-passWord", - getHivePassword(), - "-url", - getJdbcUrl(), - "-driver", - getJdbcDriver() - )); - } - - private int installAVersion(String version) { - return new MetastoreSchemaTool().run(buildArray( - "-initSchemaTo", - version, - "-dbType", - getDbType(), - "-userName", - HIVE_USER, - "-passWord", - getHivePassword(), - "-url", - getJdbcUrl(), - "-driver", - getJdbcDriver() - )); - } - - private int upgradeToLatest() { - return new MetastoreSchemaTool().run(buildArray( - "-upgradeSchema", - "-dbType", - getDbType(), - "-userName", - HIVE_USER, - "-passWord", - getHivePassword(), - "-url", - getJdbcUrl(), - "-driver", - getJdbcDriver() - )); - } - - protected String[] buildArray(String... 
strs) { - return strs; - } @Test public void install() { - Assert.assertEquals(0, createUser()); - Assert.assertEquals(0, installLatest()); + Assert.assertEquals(0, getRule().createUser()); + Assert.assertEquals(0, getRule().installLatest()); } @Test public void upgrade() throws HiveMetaException { - Assert.assertEquals(0, createUser()); - Assert.assertEquals(0, installAVersion(FIRST_VERSION)); - Assert.assertEquals(0, upgradeToLatest()); + Assert.assertEquals(0, getRule().createUser()); + Assert.assertEquals(0, getRule().installAVersion(FIRST_VERSION)); + Assert.assertEquals(0, getRule().upgradeToLatest()); } - private String[] buildRunCmd() { - List cmd = new ArrayList<>(4 + getDockerAdditionalArgs().length); - cmd.add("docker"); - cmd.add("run"); - cmd.add("--name"); - cmd.add(getDockerContainerName()); - cmd.addAll(Arrays.asList(getDockerAdditionalArgs())); - cmd.add(getDockerImageName()); - return cmd.toArray(new String[cmd.size()]); - } - - private String[] buildStopCmd() { - return buildArray( - "docker", - "stop", - getDockerContainerName() - ); - } + protected abstract DatabaseRule getRule(); - private String[] buildRmCmd() { - return buildArray( - "docker", - "rm", - getDockerContainerName() - ); - } - - private String[] buildLogCmd() { - return buildArray( - "docker", - "logs", - getDockerContainerName() - ); + protected String[] buildArray(String... 
strs) { + return strs; } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java index 9999d8d705..1c36468252 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

+ * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,66 +17,20 @@ */ package org.apache.hadoop.hive.metastore.dbinstall; -public class ITestMysql extends DbInstallBase { - - @Override - protected String getDockerImageName() { - return "mariadb:5.5"; - } - - @Override - protected String[] getDockerAdditionalArgs() { - return buildArray( - "-p", - "3306:3306", - "-e", - "MYSQL_ROOT_PASSWORD=" + getDbRootPassword(), - "-d" - ); - } - - @Override - protected String getDbType() { - return "mysql"; - } - - @Override - protected String getDbRootUser() { - return "root"; - } - - @Override - protected String getDbRootPassword() { - return "its-a-secret"; - } +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Mysql; +import org.junit.Rule; - @Override - protected String getJdbcDriver() { - return org.mariadb.jdbc.Driver.class.getName(); - } - - @Override - protected String getJdbcUrl() { - return "jdbc:mysql://localhost:3306/" + HIVE_DB; - } - - @Override - protected String getInitialJdbcUrl() { - return "jdbc:mysql://localhost:3306/"; - } - - @Override - protected boolean isContainerReady(String logOutput) { - return logOutput.contains("MySQL init process done. Ready for start up."); - } +/** + * Mysql-specific DbInstallBase child test class. 
+ */ +public class ITestMysql extends DbInstallBase { - @Override - protected String getDockerContainerName() { - return "metastore-test-mysql-install"; - } + @Rule + public final DatabaseRule databaseRule = new Mysql(); @Override - protected String getHivePassword() { - return "hivepassword"; + protected DatabaseRule getRule() { + return databaseRule; } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java index 5b93e0ffae..b2de064a76 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

+ * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,67 +17,20 @@ */ package org.apache.hadoop.hive.metastore.dbinstall; -public class ITestOracle extends DbInstallBase { - @Override - protected String getDockerContainerName() { - return "metastore-test-oracle-install"; - } - - @Override - protected String getDockerImageName() { - return "orangehrm/oracle-xe-11g"; - } - - @Override - protected String[] getDockerAdditionalArgs() { - return buildArray( - "-p", - "1521:1521", - "-e", - "DEFAULT_SYS_PASS=" + getDbRootPassword(), - "-e", - "ORACLE_ALLOW_REMOTE=true", - "-d" - ); - } - - @Override - protected String getDbType() { - return "oracle"; - } - - @Override - protected String getDbRootUser() { - return "SYS as SYSDBA"; - } +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Oracle; +import org.junit.Rule; - @Override - protected String getDbRootPassword() { - return "oracle"; - } - - @Override - protected String getJdbcDriver() { - return "oracle.jdbc.OracleDriver"; - } - - @Override - protected String getJdbcUrl() { - return "jdbc:oracle:thin:@//localhost:1521/xe"; - } - - @Override - protected String getInitialJdbcUrl() { - return "jdbc:oracle:thin:@//localhost:1521/xe"; - } +/** + * Oracle-specific DbInstallBase child test class. 
+ */ +public class ITestOracle extends DbInstallBase { - @Override - protected boolean isContainerReady(String logOutput) { - return logOutput.contains("Oracle started successfully!"); - } + @Rule + public final DatabaseRule databaseRule = new Oracle(); @Override - protected String getHivePassword() { - return "hivepassword"; + protected DatabaseRule getRule() { + return databaseRule; } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java index 9151ac766e..1e43d4f8ce 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

+ * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,66 +17,20 @@ */ package org.apache.hadoop.hive.metastore.dbinstall; -public class ITestPostgres extends DbInstallBase { - @Override - protected String getDockerContainerName() { - return "metastore-test-postgres-install"; - } - - @Override - protected String getDockerImageName() { - return "postgres:9.3"; - } - - @Override - protected String[] getDockerAdditionalArgs() { - return buildArray( - "-p", - "5432:5432", - "-e", - "POSTGRES_PASSWORD=" + getDbRootPassword(), - "-d" - - ); - } - - @Override - protected String getDbType() { - return "postgres"; - } - - @Override - protected String getDbRootUser() { - return "postgres"; - } +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Postgres; +import org.junit.Rule; - @Override - protected String getDbRootPassword() { - return "its-a-secret"; - } - - @Override - protected String getJdbcDriver() { - return org.postgresql.Driver.class.getName(); - } - - @Override - protected String getJdbcUrl() { - return "jdbc:postgresql://localhost:5432/" + HIVE_DB; - } - - @Override - protected String getInitialJdbcUrl() { - return "jdbc:postgresql://localhost:5432/postgres"; - } +/** + * Postgres-specific DbInstallBase child test class. 
+ */ +public class ITestPostgres extends DbInstallBase { - @Override - protected boolean isContainerReady(String logOutput) { - return logOutput.contains("database system is ready to accept connections"); - } + @Rule + public final DatabaseRule databaseRule = new Postgres(); @Override - protected String getHivePassword() { - return "hivepassword"; + protected DatabaseRule getRule() { + return databaseRule; } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java index 67b6eeeab2..6ec0e8764d 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

+ * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,68 +17,20 @@ */ package org.apache.hadoop.hive.metastore.dbinstall; -public class ITestSqlServer extends DbInstallBase { - @Override - protected String getDockerContainerName() { - return "metastore-test-mssql-install"; - } - - @Override - protected String getDockerImageName() { - return "microsoft/mssql-server-linux:2017-GA"; - } - - @Override - protected String[] getDockerAdditionalArgs() { - return buildArray( - "-p", - "1433:1433", - "-e", - "ACCEPT_EULA=Y", - "-e", - "SA_PASSWORD=" + getDbRootPassword(), - "-d" - ); - } - - @Override - protected String getDbType() { - return "mssql"; - } - - @Override - protected String getDbRootUser() { - return "SA"; - } +import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule; +import org.apache.hadoop.hive.metastore.dbinstall.rules.Mssql; +import org.junit.Rule; - @Override - protected String getDbRootPassword() { - return "Its-a-s3cret"; - } - - @Override - protected String getJdbcDriver() { - return com.microsoft.sqlserver.jdbc.SQLServerDriver.class.getName(); - //return "com.microsoft.sqlserver.jdbc.SQLServerDriver"; - } - - @Override - protected String getJdbcUrl() { - return "jdbc:sqlserver://localhost:1433;DatabaseName=" + HIVE_DB + ";"; - } - - @Override - protected String getInitialJdbcUrl() { - return "jdbc:sqlserver://localhost:1433"; - } +/** + * Mssql-specific DbInstallBase child test class. + */ +public class ITestSqlServer extends DbInstallBase { - @Override - protected boolean isContainerReady(String logOutput) { - return logOutput.contains("Recovery is complete. This is an informational message only. 
No user action is required."); - } + @Rule + public final DatabaseRule databaseRule = new Mssql(); @Override - protected String getHivePassword() { - return "h1vePassword!"; + protected DatabaseRule getRule() { + return databaseRule; } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java new file mode 100644 index 0000000000..c1f49d8d46 --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool; +import org.junit.rules.ExternalResource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Abstract JUnit TestRule for different RDBMS types. + */ +public abstract class DatabaseRule extends ExternalResource { + private static final Logger LOG = LoggerFactory.getLogger(DatabaseRule.class); + + protected static final String HIVE_USER = "hiveuser"; + // used in most of the RDBMS configs, except MSSQL + protected static final String HIVE_PASSWORD = "hivepassword"; + protected static final String HIVE_DB = "hivedb"; + private static final int MAX_STARTUP_WAIT = 5 * 60 * 1000; + + public abstract String getHivePassword(); + + public abstract String getDockerImageName(); + + public abstract String[] getDockerAdditionalArgs(); + + public abstract String getDbType(); + + public abstract String getDbRootUser(); + + public abstract String getDbRootPassword(); + + public abstract String getJdbcDriver(); + + public abstract String getJdbcUrl(); + + private boolean verbose; + + public DatabaseRule setVerbose(boolean verbose) { + this.verbose = verbose; + return this; + }; + + public String getDb() { + return HIVE_DB; + }; + + /** + * URL to use when connecting as root rather than Hive + * + * @return URL + */ + public abstract String getInitialJdbcUrl(); + + /** + * Determine if the docker container is ready to use. + * + * @param logOutput output of docker logs command + * @return true if ready, false otherwise + */ + public abstract boolean isContainerReady(String logOutput); + + protected String[] buildArray(String... 
strs) { + return strs; + } + + private static class ProcessResults { + final String stdout; + final String stderr; + final int rc; + + public ProcessResults(String stdout, String stderr, int rc) { + this.stdout = stdout; + this.stderr = stderr; + this.rc = rc; + } + } + + @Override + public void before() throws Exception { //runDockerContainer + if (runCmdAndPrintStreams(buildRunCmd(), 600) != 0) { + throw new RuntimeException("Unable to start docker container"); + } + long startTime = System.currentTimeMillis(); + ProcessResults pr; + do { + Thread.sleep(5000); + pr = runCmd(buildLogCmd(), 5); + if (pr.rc != 0) { + throw new RuntimeException("Failed to get docker logs"); + } + } while (startTime + MAX_STARTUP_WAIT >= System.currentTimeMillis() && !isContainerReady(pr.stdout)); + if (startTime + MAX_STARTUP_WAIT < System.currentTimeMillis()) { + throw new RuntimeException("Container failed to be ready in " + MAX_STARTUP_WAIT/1000 + + " seconds"); + } + MetastoreSchemaTool.setHomeDirForTesting(); + } + + @Override + public void after() { // stopAndRmDockerContainer + if ("true".equalsIgnoreCase(System.getProperty("metastore.itest.no.stop.container"))) { + LOG.warn("Not stopping container " + getDockerContainerName() + " at user request, please " + + "be sure to shut it down before rerunning the test."); + return; + } + try { + if (runCmdAndPrintStreams(buildStopCmd(), 60) != 0) { + throw new RuntimeException("Unable to stop docker container"); + } + if (runCmdAndPrintStreams(buildRmCmd(), 15) != 0) { + throw new RuntimeException("Unable to remove docker container"); + } + } catch (InterruptedException | IOException e) { + e.printStackTrace(); + } + } + + protected String getDockerContainerName(){ + return String.format("metastore-test-%s-install", getDbType()); + }; + + private ProcessResults runCmd(String[] cmd, long secondsToWait) + throws IOException, InterruptedException { + LOG.info("Going to run: " + StringUtils.join(cmd, " ")); + Process proc = 
Runtime.getRuntime().exec(cmd); + if (!proc.waitFor(secondsToWait, TimeUnit.SECONDS)) { + throw new RuntimeException( + "Process " + cmd[0] + " failed to run in " + secondsToWait + " seconds"); + } + BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream())); + final StringBuilder lines = new StringBuilder(); + reader.lines().forEach(s -> lines.append(s).append('\n')); + + reader = new BufferedReader(new InputStreamReader(proc.getErrorStream())); + final StringBuilder errLines = new StringBuilder(); + reader.lines().forEach(s -> errLines.append(s).append('\n')); + return new ProcessResults(lines.toString(), errLines.toString(), proc.exitValue()); + } + + private int runCmdAndPrintStreams(String[] cmd, long secondsToWait) + throws InterruptedException, IOException { + ProcessResults results = runCmd(cmd, secondsToWait); + LOG.info("Stdout from proc: " + results.stdout); + LOG.info("Stderr from proc: " + results.stderr); + return results.rc; + } + + private String[] buildRunCmd() { + List cmd = new ArrayList<>(4 + getDockerAdditionalArgs().length); + cmd.add("docker"); + cmd.add("run"); + cmd.add("--name"); + cmd.add(getDockerContainerName()); + cmd.addAll(Arrays.asList(getDockerAdditionalArgs())); + cmd.add(getDockerImageName()); + return cmd.toArray(new String[cmd.size()]); + } + + private String[] buildStopCmd() { + return buildArray( + "docker", + "stop", + getDockerContainerName() + ); + } + + private String[] buildRmCmd() { + return buildArray( + "docker", + "rm", + getDockerContainerName() + ); + } + + private String[] buildLogCmd() { + return buildArray( + "docker", + "logs", + getDockerContainerName() + ); + } + + public String getHiveUser(){ + return HIVE_USER; + } + + public int createUser() { + return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray( + "-createUser", + "-dbType", + getDbType(), + "-userName", + getDbRootUser(), + "-passWord", + getDbRootPassword(), + "-hiveUser", + getHiveUser(), + 
"-hivePassword", + getHivePassword(), + "-hiveDb", + getDb(), + "-url", + getInitialJdbcUrl(), + "-driver", + getJdbcDriver() + )); + } + + public int installLatest() { + return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray( + "-initSchema", + "-dbType", + getDbType(), + "-userName", + getHiveUser(), + "-passWord", + getHivePassword(), + "-url", + getJdbcUrl(), + "-driver", + getJdbcDriver() + )); + } + + public int installAVersion(String version) { + return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray( + "-initSchemaTo", + version, + "-dbType", + getDbType(), + "-userName", + getHiveUser(), + "-passWord", + getHivePassword(), + "-url", + getJdbcUrl(), + "-driver", + getJdbcDriver() + )); + } + + public int upgradeToLatest() { + return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray( + "-upgradeSchema", + "-dbType", + getDbType(), + "-userName", + HIVE_USER, + "-passWord", + getHivePassword(), + "-url", + getJdbcUrl(), + "-driver", + getJdbcDriver() + )); + } + + public void install() { + createUser(); + installLatest(); + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java new file mode 100644 index 0000000000..6415d7ec9f --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool; +import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; + +/** + * JUnit TestRule for Derby. + */ +public class Derby extends DatabaseRule { + + @Override + public String getDockerImageName() { + return null; + } + + @Override + public String[] getDockerAdditionalArgs() { + return null; + } + + @Override + public String getDbType() { + return "derby"; + } + + @Override + public String getDbRootUser() { + return "APP"; + } + + @Override + public String getHiveUser() { + return "APP"; + } + + @Override + public String getDbRootPassword() { + return "mine"; + } + + @Override + public String getHivePassword() { + return "mine"; + } + + @Override + public String getJdbcDriver() { + return "org.apache.derby.jdbc.EmbeddedDriver"; + } + + @Override + public String getJdbcUrl() { + return String.format("jdbc:derby:memory:%s/%s;create=true", System.getProperty("test.tmp.dir"), + getDb()); + } + + @Override + public String getInitialJdbcUrl() { + return String.format("jdbc:derby:memory:%s/%s;create=true", System.getProperty("test.tmp.dir"), + getDb()); + } + + public String getDb() { + return MetaStoreServerUtils.JUNIT_DATABASE_PREFIX; + }; + + @Override + public boolean isContainerReady(String logOutput) { + return true; + } + + @Override + public void before() throws Exception { + MetastoreSchemaTool.setHomeDirForTesting(); + } + + @Override + public void after() { + // no-op, no need for docker container 
for derby + } + + @Override + public int createUser() { + return 0; // no-op + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java new file mode 100644 index 0000000000..f9994817e3 --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +/** + * JUnit TestRule for Mssql. + */ +public class Mssql extends DatabaseRule { + + @Override + public String getDockerImageName() { + return "microsoft/mssql-server-linux:2017-GA"; + } + + @Override + public String[] getDockerAdditionalArgs() { + return buildArray( + "-p", + "1433:1433", + "-e", + "ACCEPT_EULA=Y", + "-e", + "SA_PASSWORD=" + getDbRootPassword(), + "-d" + ); + } + + @Override + public String getDbType() { + return "mssql"; + } + + @Override + public String getDbRootUser() { + return "SA"; + } + + @Override + public String getDbRootPassword() { + return "Its-a-s3cret"; + } + + @Override + public String getJdbcDriver() { + return com.microsoft.sqlserver.jdbc.SQLServerDriver.class.getName(); + // return "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + } + + @Override + public String getJdbcUrl() { + return "jdbc:sqlserver://localhost:1433;DatabaseName=" + HIVE_DB + ";"; + } + + @Override + public String getInitialJdbcUrl() { + return "jdbc:sqlserver://localhost:1433"; + } + + @Override + public boolean isContainerReady(String logOutput) { + return logOutput.contains( + "Recovery is complete. This is an informational message only. 
No user action is required."); + } + + @Override + public String getHivePassword() { + return "h1vePassword!"; + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java new file mode 100644 index 0000000000..c537d95470 --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +/** + * JUnit TestRule for MySql. + */ +public class Mysql extends DatabaseRule { + + @Override + public String getDockerImageName() { + return "mariadb:5.5"; + } + + @Override + public String[] getDockerAdditionalArgs() { + return buildArray("-p", "3306:3306", "-e", "MYSQL_ROOT_PASSWORD=" + getDbRootPassword(), "-d"); + } + + @Override + public String getDbType() { + return "mysql"; + } + + @Override + public String getDbRootUser() { + return "root"; + } + + @Override + public String getDbRootPassword() { + return "its-a-secret"; + } + + @Override + public String getJdbcDriver() { + return org.mariadb.jdbc.Driver.class.getName(); + } + + @Override + public String getJdbcUrl() { + return "jdbc:mysql://localhost:3306/" + HIVE_DB; + } + + @Override + public String getInitialJdbcUrl() { + return "jdbc:mysql://localhost:3306/"; + } + + @Override + public boolean isContainerReady(String logOutput) { + return logOutput.contains("MySQL init process done. 
Ready for start up."); + } + + @Override + public String getHivePassword() { + return HIVE_PASSWORD; + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java new file mode 100644 index 0000000000..0b070e19ac --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +/** + * JUnit TestRule for Oracle. + */ +public class Oracle extends DatabaseRule { + + @Override + public String getDockerImageName() { + return "orangehrm/oracle-xe-11g"; + } + + @Override + public String[] getDockerAdditionalArgs() { + return buildArray( + "-p", + "1521:1521", + "-e", + "DEFAULT_SYS_PASS=" + getDbRootPassword(), + "-e", + "ORACLE_ALLOW_REMOTE=true", + "-d" + ); + } + + @Override + public String getDbType() { + return "oracle"; + } + + @Override + public String getDbRootUser() { + return "SYS as SYSDBA"; + } + + @Override + public String getDbRootPassword() { + return "oracle"; + } + + @Override + public String getJdbcDriver() { + return "oracle.jdbc.OracleDriver"; + } + + @Override + public String getJdbcUrl() { + return "jdbc:oracle:thin:@//localhost:1521/xe"; + } + + @Override + public String getInitialJdbcUrl() { + return "jdbc:oracle:thin:@//localhost:1521/xe"; + } + + @Override + public boolean isContainerReady(String logOutput) { + return logOutput.contains("Oracle started successfully!"); + } + + @Override + public String getHivePassword() { + return HIVE_PASSWORD; + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java new file mode 100644 index 0000000000..5840095a3f --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.dbinstall.rules; + +/** + * JUnit TestRule for Postgres. + */ +public class Postgres extends DatabaseRule { + @Override + public String getDockerImageName() { + return "postgres:9.3"; + } + + @Override + public String[] getDockerAdditionalArgs() { + return buildArray("-p", "5432:5432", "-e", "POSTGRES_PASSWORD=" + getDbRootPassword(), "-d"); + } + + @Override + public String getDbType() { + return "postgres"; + } + + @Override + public String getDbRootUser() { + return "postgres"; + } + + @Override + public String getDbRootPassword() { + return "its-a-secret"; + } + + @Override + public String getJdbcDriver() { + return org.postgresql.Driver.class.getName(); + } + + @Override + public String getJdbcUrl() { + return "jdbc:postgresql://localhost:5432/" + HIVE_DB; + } + + @Override + public String getInitialJdbcUrl() { + return "jdbc:postgresql://localhost:5432/postgres"; + } + + @Override + public boolean isContainerReady(String logOutput) { + return logOutput.contains("database system is ready to accept connections"); + } + + @Override + public String getHivePassword() { + return HIVE_PASSWORD; + } +} diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java index b4a0844be3..a93e23b245 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java +++ 
standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java @@ -40,7 +40,7 @@ @Mock private Configuration conf; private MetastoreSchemaTool.CommandBuilder builder; - private String pasword = "reallySimplePassword"; + private String password = "reallySimplePassword"; @Before public void setup() throws IOException { @@ -49,7 +49,9 @@ public void setup() throws IOException { if (!file.exists()) { file.createNewFile(); } - builder = new MetastoreSchemaTool.CommandBuilder(conf, null, null, "testUser", pasword, scriptFile); + builder = + new MetastoreSchemaTool.CommandBuilder(conf, null, null, "testUser", password, scriptFile) + .setVerbose(false); } @After @@ -59,12 +61,12 @@ public void globalAssert() throws IOException { @Test public void shouldReturnStrippedPassword() throws IOException { - assertFalse(builder.buildToLog().contains(pasword)); + assertFalse(builder.buildToLog().contains(password)); } @Test public void shouldReturnActualPassword() throws IOException { String[] strings = builder.buildToRun(); - assertTrue(Arrays.asList(strings).contains(pasword)); + assertTrue(Arrays.asList(strings).contains(password)); } }