diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
index 53f0d0e..5ceb3d2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
@@ -19,7 +19,6 @@
 import java.io.File;
 import java.lang.reflect.Field;
-import java.util.Random;
 
 import junit.framework.TestCase;
 
@@ -32,6 +31,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -96,8 +96,9 @@ public void testVersionRestriction () throws Exception {
     // session creation should fail since the schema didn't get created
     try {
       SessionState.start(new CliSessionState(hiveConf));
-      fail("Expected exception");
-    } catch (RuntimeException re) {
+      Hive.get(hiveConf).getMSC();
+      fail("An exception is expected since schema is not created.");
+    } catch (Exception re) {
       LOG.info("Exception in testVersionRestriction: " + re, re);
       String msg = HiveStringUtils.stringifyException(re);
       assertTrue("Expected 'Version information not found in metastore' in: " + msg, msg
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
index 3ed88f2..aefafe0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
@@ -41,8 +41,6 @@
  */
 public class TestHBaseMetastoreMetrics extends HBaseIntegrationTests {
 
-  private CodahaleMetrics metrics;
-
   @BeforeClass
   public static void startup() throws Exception {
     HBaseIntegrationTests.startMiniCluster();
@@ -66,7 +64,6 @@ public void before() throws IOException {
     conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
     SessionState.start(new CliSessionState(conf));
     driver = new Driver(conf);
-    metrics = (CodahaleMetrics) MetricsFactory.getInstance();
   }
 
   @Test
@@ -107,6 +104,7 @@ public void testMetaDataCounts() throws Exception {
     driver.run("use default");
     driver.run("drop database tempdb cascade");
 
+    CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
     String json = metrics.dumpJson();
     MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_DATABASES, 2);
     MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_TABLES, 7);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 3ff5742..70c0b13 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -39,12 +39,14 @@
   /** A handle to this harness's cluster */
   private final HConnection conn;
 
+  private HBaseTestSetup setup;
+
   public HBaseQTestUtil(
     String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup,
     String initScript, String cleanupScript)
     throws Exception {
-
     super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false, false);
+    this.setup = setup;
     setup.preTest(conf);
     this.conn = setup.getConnection();
     super.init();
@@ -69,6 +71,12 @@ public void init() throws Exception {
   }
 
   @Override
+  protected void initConfFromSetup() throws Exception {
+    super.initConfFromSetup();
+    setup.preTest(conf);
+  }
+
+  @Override
   public void createSources(String tname) throws Exception {
     super.createSources(tname);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
index e6383dc..cee7158 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
@@ -22,9 +22,6 @@
 import java.net.ServerSocket;
 import java.util.Arrays;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 5e81e98..d851faf 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -505,6 +505,7 @@ public void shutdown() throws Exception {
       dfs.shutdown();
       dfs = null;
     }
+    Hive.closeCurrent();
   }
 
   public String readEntireFileIntoString(File queryFile) throws IOException {
@@ -726,8 +727,9 @@ public void clearTablesCreatedDuringTests() throws Exception {
       return;
     }
 
-    db.getConf().set("hive.metastore.filter.hook",
+    conf.set("hive.metastore.filter.hook",
         "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl");
+    db = Hive.get(conf);
     // Delete any tables other than the source tables
     // and any databases other than the default database.
     for (String dbName : db.getAllDatabases()) {
@@ -795,16 +797,20 @@ public void clearTestSideEffects() throws Exception {
       return;
     }
 
-    clearTablesCreatedDuringTests();
-    clearKeysCreatedInTests();
-
     // allocate and initialize a new conf since a test can
     // modify conf by using 'set' commands
     conf = new HiveConf(Driver.class);
     initConf();
+    initConfFromSetup();
+
     // renew the metastore since the cluster type is unencrypted
     db = Hive.get(conf); // propagate new conf to meta store
+    clearTablesCreatedDuringTests();
+    clearKeysCreatedInTests();
+  }
+
+  protected void initConfFromSetup() throws Exception {
     setup.preTest(conf);
   }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index cdd12ab..9107b1c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -178,7 +178,7 @@
   private boolean isConnected = false;
   private URI metastoreUris[];
   private final HiveMetaHookLoader hookLoader;
-  protected final HiveConf conf;
+  protected final HiveConf conf;  // Keep a copy of HiveConf so if Session conf changes, we may need to get a new HMS client.
   protected boolean fastpath = false;
   private String tokenStrForm;
   private final boolean localMetaStore;
@@ -204,9 +204,10 @@ public HiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, Boolean
     this.hookLoader = hookLoader;
     if (conf == null) {
-      conf = new HiveConf(HiveMetaStoreClient.class);
+      this.conf = conf = new HiveConf(HiveMetaStoreClient.class);
+    } else {
+      this.conf = new HiveConf(conf);
     }
-    this.conf = conf;
     filterHook = loadFilterHooks();
     fileMetadataBatchSize = HiveConf.getIntVar(
         conf, HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_OBJECTS_MAX);
@@ -221,10 +222,10 @@ public HiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, Boolean
     // instantiate the metastore server handler directly instead of connecting
     // through the network
     if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
-      client = new HiveMetaStore.HMSHandler("hive client", conf, true);
+      client = new HiveMetaStore.HMSHandler("hive client", this.conf, true);
       fastpath = true;
     } else {
-      client = HiveMetaStore.newRetryingHMSHandler("hive client", conf, true);
+      client = HiveMetaStore.newRetryingHMSHandler("hive client", this.conf, true);
     }
     isConnected = true;
     snapshotActiveConf();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 78bbb1f..32fe4fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -546,10 +546,6 @@ private static void start(SessionState startSs, boolean isAsync, LogHelper conso
     // Get the following out of the way when you start the session these take a
     // while and should be done when we start up.
     try {
-      // Hive object instance should be created with a copy of the conf object. If the conf is
-      // shared with SessionState, other parts of the code might update the config, but
-      // Hive.get(HiveConf) would not recognize the case when it needs refreshing
-      Hive.get(new HiveConf(startSs.conf)).getMSC();
       UserGroupInformation sessionUGI = Utils.getUGI();
       FileSystem.get(startSs.conf);
@@ -575,10 +571,6 @@ private static void start(SessionState startSs, boolean isAsync, LogHelper conso
       }
     } catch (RuntimeException e) {
       throw e;
-    } catch (Hive.SchemaException e) {
-      RuntimeException ex = new RuntimeException(e.getMessage());
-      ex.setStackTrace(new StackTraceElement[0]);
-      throw ex;
     } catch (Exception e) {
       // Catch-all due to some exec time dependencies on session state
       // that would cause ClassNoFoundException otherwise
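
Note on the HiveMetaStoreClient hunks above: the client now keeps its own copy of the HiveConf instead of aliasing the caller's object, so later "set" commands on the session conf cannot silently change the client's configuration, and Hive.get(conf) can tell when a fresh client is warranted. The sketch below is only an illustration of that defensive-copy idea, not Hive code; the class and method names (ConfSnapshotExample, needsRefresh) are made up, and java.util.Properties stands in for HiveConf.

// Illustrative sketch only -- shows the defensive-copy pattern, with hypothetical names.
import java.util.Properties;

public class ConfSnapshotExample {
  private final Properties conf; // private snapshot of the configuration at construction time

  public ConfSnapshotExample(Properties sessionConf) {
    // Copy instead of aliasing the caller's object, mirroring "this.conf = new HiveConf(conf)".
    this.conf = new Properties();
    this.conf.putAll(sessionConf);
  }

  // True when the session conf has drifted from the snapshot and a new client is warranted,
  // loosely analogous to the check Hive.get(HiveConf) performs before reusing a cached client.
  public boolean needsRefresh(Properties sessionConf) {
    return !conf.equals(sessionConf);
  }

  public static void main(String[] args) {
    Properties session = new Properties();
    session.setProperty("hive.metastore.uris", "thrift://localhost:9083");

    ConfSnapshotExample client = new ConfSnapshotExample(session);
    session.setProperty("hive.metastore.uris", "thrift://other-host:9083"); // session changes later

    System.out.println(client.needsRefresh(session)); // prints "true"; the snapshot is unaffected
  }
}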