diff --git a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q index 48be8cd..606a60c 100644 --- a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q +++ b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q @@ -15,14 +15,8 @@ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string") TBLPROPERTIES ("hbase.table.name" = "hbase_table_0", "external.table.purge" = "true"); -dfs -ls target/tmp/hbase/data/default/hbase_table_0; +dfs -ls ${hiveconf:hbase.rootdir}/data/default/hbase_table_0; DROP DATABASE IF EXISTS hbaseDB CASCADE; -dfs -ls target/tmp/hbase/data/default/hbase_table_0; - - - - - - +dfs -ls ${hiveconf:hbase.rootdir}/data/default/hbase_table_0; diff --git a/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q b/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q index e429071..fe59063 100644 --- a/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q +++ b/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q @@ -1,3 +1,5 @@ +set fs.defaultFS=${hiveconf:hbase.rootdir}; + --! 
qt:dataset:src_hbase set hive.stats.column.autogather=true; SET hive.hbase.snapshot.name=src_hbase_snapshot; @@ -8,3 +10,5 @@ SELECT * FROM src_hbase LIMIT 5; SELECT value FROM src_hbase LIMIT 5; select count(*) from src_hbase; + +reset fs.defaultFS; \ No newline at end of file diff --git a/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out b/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out index 803e35e..9f3d2c0 100644 --- a/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out +++ b/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out @@ -19,7 +19,9 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:hbasedb POSTHOOK: Output: hbaseDB@hbase_table_0 Found 3 items -#### A masked pattern was here #### +drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ### +drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ### +drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ### PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE PREHOOK: type: DROPDATABASE PREHOOK: Input: database:hbasedb @@ -30,5 +32,5 @@ POSTHOOK: type: DROPDATABASE POSTHOOK: Input: database:hbasedb POSTHOOK: Output: database:hbasedb POSTHOOK: Output: hbasedb@hbase_table_0 -#### A masked pattern was here #### +Command -ls hdfs://### HDFS PATH ### failed with exit code = 1 Query returned non-zero code: 1, cause: null diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java index 31195c4..b7e563a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java @@ -90,7 +90,18 @@ public CheckResults(String outDir, String logDir, MiniClusterType miniMr, String hadoopVer, String locationSubdir) throws Exception { - super(outDir, logDir, miniMr, 
null, hadoopVer, "", "", false); + super( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(outDir) + .withLogDir(logDir) + .withClusterType(miniMr) + .withConfDir(null) + .withHadoopVer(hadoopVer) + .withInitScript("") + .withCleanupScript("") + .withLlapIo(false) + .build()); + this.locationSubdir = locationSubdir; } } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java index 956478d..060e0cd 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.accumulo; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestUtil; /** @@ -29,7 +30,18 @@ public AccumuloQTestUtil(String outDir, String logDir, MiniClusterType miniMr, AccumuloTestSetup setup, String initScript, String cleanupScript) throws Exception { - super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false); + super( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(outDir) + .withLogDir(logDir) + .withClusterType(miniMr) + .withConfDir(null) + .withHadoopVer("0.20") + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .build()); + setup.setupWithHiveConf(conf); this.setup = setup; this.savedConf = new HiveConf(conf); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java index 3cf5ebb..64f2919 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java @@ 
-28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -59,8 +60,18 @@ public void beforeClass() { String cleanupScript = cliConfig.getCleanupScript(); try { String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, true); + + qt = new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(hiveConfDir) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(true) + .build()); if (Strings.isNullOrEmpty(qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH))) { fail(String.format("%s must be set. 
Try setting in blobstore-conf.xml", HCONF_TEST_BLOBSTORE_PATH)); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java index 1ead144..8413444 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java @@ -23,6 +23,7 @@ import java.io.File; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -63,8 +64,18 @@ public void beforeClass() { qt = new ElapsedTimeLoggingWrapper() { @Override public QTestUtil invokeInternal() throws Exception { - return new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, true, cliConfig.getFsType()); + return new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(hiveConfDir) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(true) + .withFsType(cliConfig.getFsType()) + .build()); } }.invoke("QtestUtil instance created", LOG, true); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java index 6b4c6c6..8ce4349 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.ql.QTestArguments; import 
org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -52,8 +53,17 @@ public void beforeClass() { String cleanupScript = cliConfig.getCleanupScript(); try { String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, false); + qt = new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(hiveConfDir) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .build()); // do a one time initialization qt.newSession(); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java index 70cbf04..afb0ed6 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java @@ -34,7 +34,6 @@ public class CoreHBaseCliDriver extends CliAdapter { private HBaseQTestUtil qt; - private HBaseTestSetup setup = new HBaseTestSetup(); public CoreHBaseCliDriver(AbstractCliConfig testCliConfig) { super(testCliConfig); @@ -49,7 +48,8 @@ public void beforeClass() { try { qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - setup, initScript, cleanupScript); + new HBaseTestSetup(), initScript, cleanupScript); + qt.newSession(); qt.cleanUp(null); qt.createSources(null); @@ -58,16 +58,15 @@ public void beforeClass() { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); System.err.flush(); - throw new RuntimeException(e); + fail("Unexpected exception in static initialization: "+e.getMessage()); } - 
} @Override @Before public void setUp() { try { - qt.newSession(); + qt.clearTestSideEffects(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -75,12 +74,12 @@ public void setUp() { fail("Unexpected exception in setup"); } } + @Override @After public void tearDown() { try { qt.clearPostTestEffects(); - qt.clearTestSideEffects(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -94,7 +93,6 @@ public void tearDown() { public void shutdown() throws Exception { try { qt.shutdown(); - setup.tearDown(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -108,9 +106,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { long startTime = System.currentTimeMillis(); try { System.err.println("Begin query: " + fname); - qt.addFile(fpath); - qt.cliInit(new File(fpath)); int ecode = qt.executeClient(fname); @@ -131,5 +127,6 @@ public void runTest(String tname, String fname, String fpath) throws Exception { System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); assertTrue("Test passed", true); } + } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java index c76a70e..9e720c8 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java @@ -34,7 +34,6 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter { private HBaseQTestUtil qt; - private static HBaseTestSetup setup = new HBaseTestSetup(); public CoreHBaseNegativeCliDriver(AbstractCliConfig testCliConfig) { super(testCliConfig); @@ -48,21 +47,21 @@ public void beforeClass() throws Exception { try { qt = new 
HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, - setup, initScript, cleanupScript); + new HBaseTestSetup(), initScript, cleanupScript); + } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); System.err.flush(); - fail("Unexpected exception in setup"); + fail("Unexpected exception in static initialization: "+e.getMessage()); } } - // hmm..this looks a bit wierd...setup boots qtestutil...this part used to be in beforeclass @Override @Before public void setUp() { try { - qt.newSession(); + qt.clearTestSideEffects(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -76,7 +75,6 @@ public void setUp() { public void tearDown() { try { qt.clearPostTestEffects(); - qt.clearTestSideEffects(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); @@ -94,10 +92,8 @@ public void shutdown() throws Exception { System.err.println("Exception: " + e.getMessage()); e.printStackTrace(); System.err.flush(); - fail("Unexpected exception in tearDown"); + fail("Unexpected exception in shutdown"); } - // closeHBaseConnections - setup.tearDown(); } @Override @@ -107,6 +103,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { System.err.println("Begin query: " + fname); qt.addFile(fpath); qt.cliInit(new File(fpath)); + int ecode = qt.executeClient(fname); if (ecode == 0) { qt.failed(fname, null); @@ -126,6 +123,5 @@ public void runTest(String tname, String fname, String fpath) throws Exception { assertTrue("Test passed", true); } - } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java index 07ae6ac..0807da1 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java +++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java @@ -22,6 +22,7 @@ import java.io.File; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -47,8 +48,17 @@ public void beforeClass(){ try { String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, - hiveConfDir, hadoopVer, initScript, cleanupScript, false); + qt = new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(hiveConfDir) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .build()); // do a one time initialization qt.newSession(); qt.cleanUp(); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java index 55e744e..badb4a5 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java @@ -26,6 +26,7 @@ import java.io.File; import org.apache.hadoop.hive.ql.MetaStoreDumpUtility; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -63,9 +64,18 @@ public void beforeClass() { String cleanupScript = cliConfig.getCleanupScript(); try { String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, hiveConfDir, - hadoopVer, initScript, - cleanupScript, false, null); + + qt = new 
QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(hiveConfDir) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .build()); // do a one time initialization qt.newSession(); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java index 07df0c9..6a6b100 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestUtil; /** @@ -37,29 +37,25 @@ /** A handle to this harness's cluster */ private final Connection conn; - private HBaseTestSetup hbaseSetup = null; - public HBaseQTestUtil( String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup, String initScript, String cleanupScript) throws Exception { - super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false); - hbaseSetup = setup; - hbaseSetup.preTest(conf); - this.conn = setup.getConnection(); - super.init(); - } - - @Override - public void init() throws Exception { - // defer - } + super( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(outDir) + .withLogDir(logDir) + .withClusterType(miniMr) + .withConfDir(null) + .withHadoopVer("0.20") + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .withQTestSetup(setup) + .build()); - @Override - protected void initConfFromSetup() throws Exception { - super.initConfFromSetup(); - 
hbaseSetup.preTest(conf); + this.conn = setup.getConnection(); } @Override @@ -95,9 +91,4 @@ public void cleanUp(String tname) throws Exception { } } - @Override - public void clearTestSideEffects() throws Exception { - super.clearTestSideEffects(); - hbaseSetup.preTest(conf); - } } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java index 7b203a9..52c37ef 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java @@ -19,23 +19,25 @@ package org.apache.hadoop.hive.hbase; import java.io.IOException; -import java.net.ServerSocket; import java.util.Arrays; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.mapred.JobConf; import org.apache.zookeeper.Watcher; @@ -43,11 +45,11 @@ * HBaseTestSetup defines HBase-specific test fixtures which are * reused across testcases. 
*/ -public class HBaseTestSetup { +public class HBaseTestSetup extends QTestUtil.QTestSetup { private MiniHBaseCluster hbaseCluster; + private HBaseTestingUtility util; private int zooKeeperPort; - private String hbaseRoot; private Connection hbaseConn; private static final int NUM_REGIONSERVERS = 1; @@ -56,13 +58,16 @@ public Connection getConnection() { return this.hbaseConn; } - void preTest(HiveConf conf) throws Exception { + @Override + public void preTest(HiveConf conf) throws Exception { + super.preTest(conf); setUpFixtures(conf); - conf.set("hbase.rootdir", hbaseRoot); - conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort()); - conf.set("hbase.zookeeper.property.clientPort", Integer.toString(zooKeeperPort)); + // Set some properties since HiveConf gets recreated for the new query + Path hbaseRoot = util.getDefaultRootDirPath(); + conf.set(HConstants.HBASE_DIR, hbaseRoot.toUri().toString()); + String auxJars = conf.getAuxJars(); auxJars = (StringUtils.isBlank(auxJars) ? "" : (auxJars + ",")) + "file://" + new JobConf(conf, HBaseConfiguration.class).getJar(); @@ -76,30 +81,23 @@ private void setUpFixtures(HiveConf conf) throws Exception { * QTestUtil already starts it. 
*/ int zkPort = conf.getInt("hive.zookeeper.client.port", -1); + conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, Integer.toString(zkPort)); if ((zkPort == zooKeeperPort) && (hbaseCluster != null)) { return; } zooKeeperPort = zkPort; - String tmpdir = System.getProperty("test.tmp.dir"); this.tearDown(); - conf.set("hbase.master", "local"); - - hbaseRoot = "file:///" + tmpdir + "/hbase"; - conf.set("hbase.rootdir", hbaseRoot); - conf.set("hbase.zookeeper.property.clientPort", - Integer.toString(zooKeeperPort)); - Configuration hbaseConf = HBaseConfiguration.create(conf); - hbaseConf.setInt("hbase.master.port", findFreePort()); - hbaseConf.setInt("hbase.master.info.port", -1); - hbaseConf.setInt("hbase.regionserver.port", findFreePort()); - hbaseConf.setInt("hbase.regionserver.info.port", -1); // Fix needed due to dependency for hbase-mapreduce module + // Check CDH-59433 for details System.setProperty("org.apache.hadoop.hbase.shaded.io.netty.packagePrefix", "org.apache.hadoop.hbase.shaded."); - hbaseCluster = new MiniHBaseCluster(hbaseConf, NUM_REGIONSERVERS); - conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort()); - hbaseConn = ConnectionFactory.createConnection(hbaseConf); + + Configuration hbaseConf = HBaseConfiguration.create(conf); + util = new HBaseTestingUtility(hbaseConf); + util.startMiniDFSCluster(1); + hbaseCluster = util.startMiniHBaseCluster(1, NUM_REGIONSERVERS); + hbaseConn = util.getConnection(); // opening the META table ensures that cluster is running Table meta = null; @@ -160,20 +158,10 @@ private void createHBaseTable() throws IOException { } } - private static int findFreePort() throws IOException { - ServerSocket server = new ServerSocket(0); - int port = server.getLocalPort(); - server.close(); - return port; - } - + @Override public void tearDown() throws Exception { - if (hbaseConn != null) { - hbaseConn.close(); - hbaseConn = null; - } if (hbaseCluster != null) { - hbaseCluster.shutdown(); + 
util.shutdownMiniCluster(); hbaseCluster = null; } } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java new file mode 100644 index 0000000..e3794b4 --- /dev/null +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql; + +/** + * QTestArguments composite used as arguments holder for QTestUtil initialization. 
+ */ +public class QTestArguments { + + private String outDir; + private String logDir; + private String confDir; + private QTestUtil.MiniClusterType clusterType; + private String hadoopVer; + private String initScript; + private String cleanupScript; + private boolean withLlapIo; + private QTestUtil.FsType fsType; + private QTestUtil.QTestSetup qtestSetup; + + public void setClusterType(QTestUtil.MiniClusterType clusterType) { + this.clusterType = clusterType; + } + + public QTestUtil.MiniClusterType getClusterType() { + return clusterType; + } + + public String getOutDir() { + return outDir; + } + + public void setOutDir(String outDir) { + this.outDir = outDir; + } + + public String getLogDir() { + return logDir; + } + + public void setLogDir(String logDir) { + this.logDir = logDir; + } + + public void setConfDir(String confDir) { + this.confDir = confDir; + } + + public String getConfDir() { + return confDir; + } + + public void setHadoopVer(String hadoopVer) { + this.hadoopVer = hadoopVer; + } + + public String getHadoopVer() { + return hadoopVer; + } + + public void setInitScript(String initScript) { + this.initScript = initScript; + } + + public String getInitScript() { + return initScript; + } + + public void setCleanupScript(String cleanupScript) { + this.cleanupScript = cleanupScript; + } + + public String getCleanupScript() { + return cleanupScript; + } + + public void setWithLlapIo(boolean withLlapIo) { + this.withLlapIo = withLlapIo; + } + + public boolean isWithLlapIo() { + return withLlapIo; + } + + public void setFsType(QTestUtil.FsType fsType) { + this.fsType = fsType; + } + + public QTestUtil.FsType getFsType() { + return fsType; + } + + public void setQTestSetup(QTestUtil.QTestSetup qtestSetup) { + this.qtestSetup = qtestSetup; + } + + public QTestUtil.QTestSetup getQTestSetup() { + return qtestSetup; + } + + /** + * QTestArgumentsBuilder used for QTestArguments construction. 
+ */ + public static final class QTestArgumentsBuilder + { + private String outDir; + private String logDir; + private String confDir; + private QTestUtil.MiniClusterType clusterType; + private String hadoopVer; + private String initScript; + private String cleanupScript; + private boolean withLlapIo; + private QTestUtil.FsType fsType; + private QTestUtil.QTestSetup qtestSetup; + + private QTestArgumentsBuilder(){ + } + + public static QTestArgumentsBuilder instance(){ + return new QTestArgumentsBuilder(); + } + + public QTestArgumentsBuilder withOutDir(String outDir){ + this.outDir = outDir; + return this; + } + + public QTestArgumentsBuilder withLogDir(String logDir){ + this.logDir = logDir; + return this; + } + + public QTestArgumentsBuilder withConfDir(String confDir){ + this.confDir = confDir; + return this; + } + + public QTestArgumentsBuilder withClusterType(QTestUtil.MiniClusterType clusterType){ + this.clusterType = clusterType; + return this; + } + + + public QTestArgumentsBuilder withHadoopVer(String hadoopVer){ + this.hadoopVer = hadoopVer; + return this; + } + + public QTestArgumentsBuilder withInitScript(String initScript){ + this.initScript = initScript; + return this; + } + + public QTestArgumentsBuilder withCleanupScript(String cleanupScript){ + this.cleanupScript = cleanupScript; + return this; + } + + public QTestArgumentsBuilder withLlapIo(boolean withLlapIo){ + this.withLlapIo = withLlapIo; + return this; + } + + public QTestArgumentsBuilder withFsType(QTestUtil.FsType fsType){ + this.fsType = fsType; + return this; + } + + public QTestArgumentsBuilder withQTestSetup(QTestUtil.QTestSetup qtestSetup){ + this.qtestSetup = qtestSetup; + return this; + } + + public QTestArguments build(){ + QTestArguments testArguments = new QTestArguments(); + testArguments.setOutDir(outDir); + testArguments.setLogDir(logDir); + testArguments.setConfDir(confDir); + testArguments.setClusterType(clusterType); + testArguments.setHadoopVer(hadoopVer); + 
testArguments.setInitScript(initScript); + testArguments.setCleanupScript(cleanupScript); + testArguments.setWithLlapIo(withLlapIo); + testArguments.setFsType(fsType); + + testArguments.setQTestSetup( + qtestSetup != null ? qtestSetup : new QTestUtil.QTestSetup()); + + return testArguments; + } + } + +} diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 5adbb63..686eefa 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -484,36 +484,28 @@ private String getKeyProviderURI() { return "jceks://file" + new Path(keyDir, "test.jks").toUri(); } - public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, - String confDir, String hadoopVer, String initScript, String cleanupScript, - boolean withLlapIo) throws Exception { - this(outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript, - withLlapIo, null); - } - - public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, - String confDir, String hadoopVer, String initScript, String cleanupScript, - boolean withLlapIo, FsType fsType) - throws Exception { + public QTestUtil(QTestArguments testArgs) throws Exception { LOG.info("Setting up QTestUtil with outDir={}, logDir={}, clusterType={}, confDir={}," + " hadoopVer={}, initScript={}, cleanupScript={}, withLlapIo={}," + - " fsType={}" , outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript, - withLlapIo, fsType); - Preconditions.checkNotNull(clusterType, "ClusterType cannot be null"); - if (fsType != null) { - this.fsType = fsType; + " fsType={}", + testArgs.getOutDir(), testArgs.getLogDir(), testArgs.getClusterType(), testArgs.getConfDir(), testArgs.getHadoopVer(), + testArgs.getInitScript(), testArgs.getCleanupScript(), testArgs.isWithLlapIo(), testArgs.getFsType()); + + 
Preconditions.checkNotNull(testArgs.getClusterType(), "ClusterType cannot be null"); + if (testArgs.getFsType() != null) { + this.fsType = testArgs.getFsType(); } else { - this.fsType = clusterType.getDefaultFsType(); + this.fsType = testArgs.getClusterType().getDefaultFsType(); } - this.outDir = outDir; - this.logDir = logDir; + this.outDir = testArgs.getOutDir(); + this.logDir = testArgs.getLogDir(); this.srcUDFs = getSrcUDFs(); this.qOutProcessor = new QOutProcessor(fsType); // HIVE-14443 move this fall-back logic to CliConfigs - if (confDir != null && !confDir.isEmpty()) { - HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); + if (testArgs.getConfDir() != null && !testArgs.getConfDir().isEmpty()) { + HiveConf.setHiveSiteLocation(new URL( + "file://"+ new File(testArgs.getConfDir()).toURI().getPath() + "/hive-site.xml")); MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation()); System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation()); } @@ -521,29 +513,29 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build(); conf = queryState.getConf(); - this.hadoopVer = getHadoopMainVersion(hadoopVer); + this.hadoopVer = getHadoopMainVersion(testArgs.getHadoopVer()); - qMap = new TreeMap(); - qSortSet = new HashSet(); - qSortQuerySet = new HashSet(); - qHashQuerySet = new HashSet(); - qSortNHashQuerySet = new HashSet(); - qNoSessionReuseQuerySet = new HashSet(); - qMaskStatsQuerySet = new HashSet(); - qMaskDataSizeQuerySet = new HashSet(); - qMaskLineageQuerySet = new HashSet(); - this.clusterType = clusterType; + qMap = new TreeMap<>(); + qSortSet = new HashSet<>(); + qSortQuerySet = new HashSet<>(); + qHashQuerySet = new HashSet<>(); + qSortNHashQuerySet = new HashSet<>(); + qNoSessionReuseQuerySet = new HashSet<>(); + qMaskStatsQuerySet = new HashSet<>(); + qMaskDataSizeQuerySet = new HashSet<>(); + qMaskLineageQuerySet = new HashSet<>(); + 
this.clusterType = testArgs.getClusterType(); HadoopShims shims = ShimLoader.getHadoopShims(); setupFileSystem(shims); - setup = new QTestSetup(); + this.setup = testArgs.getQTestSetup(); setup.preTest(conf); - setupMiniCluster(shims, confDir); + setupMiniCluster(shims, testArgs.getConfDir()); initConf(); - if (withLlapIo && (clusterType == MiniClusterType.none)) { + if (testArgs.isWithLlapIo() && (clusterType == MiniClusterType.none)) { LOG.info("initializing llap IO"); LlapProxy.initializeLlapIo(conf); } @@ -564,8 +556,8 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String scriptsDir = getScriptsDir(); - this.initScript = scriptsDir + File.separator + initScript; - this.cleanupScript = scriptsDir + File.separator + cleanupScript; + this.initScript = scriptsDir + File.separator + testArgs.getInitScript(); + this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript(); overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite")); @@ -1927,9 +1919,19 @@ public void run() { { QTestUtil[] qt = new QTestUtil[qfiles.length]; for (int i = 0; i < qfiles.length; i++) { - qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20", - initScript == null ? defaultInitScript : initScript, - cleanupScript == null ? defaultCleanupScript : cleanupScript, false); + + qt[i] = new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(resDir) + .withLogDir(logDir) + .withClusterType(MiniClusterType.none) + .withConfDir(null) + .withHadoopVer("0.20") + .withInitScript(initScript == null ? defaultInitScript : initScript) + .withCleanupScript(cleanupScript == null ? 
defaultCleanupScript : cleanupScript) + .withLlapIo(false) + .build()); + qt[i].addFile(qfiles[i], false); qt[i].clearTestSideEffects(); } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java index 8f5744d..77de3fa 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.cli.control.AbstractCliConfig; import org.apache.hadoop.hive.cli.control.CliAdapter; import org.apache.hadoop.hive.cli.control.CliConfigs; +import org.apache.hadoop.hive.ql.QTestArguments; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; @@ -55,8 +56,18 @@ public void beforeClass() { firstRun = true; try { String hadoopVer = cliConfig.getHadoopVersion(); - qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, null, - hadoopVer, initScript, cleanupScript, false); + qt = new QTestUtil( + QTestArguments.QTestArgumentsBuilder.instance() + .withOutDir(cliConfig.getResultsDir()) + .withLogDir(cliConfig.getLogDir()) + .withClusterType(miniMR) + .withConfDir(null) + .withHadoopVer(hadoopVer) + .withInitScript(initScript) + .withCleanupScript(cleanupScript) + .withLlapIo(false) + .build()); + qt.newSession(); } catch (Exception e) { System.err.println("Exception: " + e.getMessage());