diff --git ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
index f372d7c..0989d65 100644
--- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
+++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -153,6 +153,8 @@ public boolean accept(File filePath) {
private String useHBaseMetastore;
+ private String attemptCache;
+
public void setHadoopVersion(String ver) {
this.hadoopVersion = ver;
}
@@ -233,6 +235,14 @@ public void setUseHBaseMetastore(String useHBaseMetastore) {
this.useHBaseMetastore = useHBaseMetastore;
}
+ public String getAttemptCache() {
+ return attemptCache;
+ }
+
+ public void setAttemptCache(String attemptCache) {
+ this.attemptCache = attemptCache;
+ }
+
public void setHiveRootDirectory(File hiveRootDirectory) {
try {
this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
@@ -543,6 +553,7 @@ public void execute() throws BuildException {
ctx.put("initScript", initScript);
ctx.put("cleanupScript", cleanupScript);
ctx.put("useHBaseMetastore", useHBaseMetastore);
+ ctx.put("attemptCache", attemptCache);
File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
diff --git itests/qtest-spark/pom.xml itests/qtest-spark/pom.xml
index b5c371a..e5b4663 100644
--- itests/qtest-spark/pom.xml
+++ itests/qtest-spark/pom.xml
@@ -397,6 +397,7 @@
logFile="${project.build.directory}/testsparkclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/spark"
initScript="q_test_init.sql"
+ attemptCache="true"
cleanupScript="q_test_cleanup.sql"/>
@@ -415,6 +416,7 @@
logFile="${project.build.directory}/testminisparkonyarnclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/spark"
initScript="q_test_init.sql"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
diff --git itests/qtest/pom.xml itests/qtest/pom.xml
index c747302..fb4c10f 100644
--- itests/qtest/pom.xml
+++ itests/qtest/pom.xml
@@ -440,6 +440,7 @@
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="true"
cleanupScript="q_test_cleanup.sql"/>
@@ -509,6 +510,7 @@
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
@@ -661,6 +666,7 @@
logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
+        if (metaStorePath.isUriPathAbsolute() && metaStorePath.depth() >= 3) {
+ // Turn this on only if the path is absolute, and is at least 3 deep - since we'll be deleting files later.
+ localFs = FileSystem.getLocal(conf).getRaw();
+ assert(TEST_BUILD_DIR != null);
+ cachedDataPath = new Path(TEST_BUILD_DIR, CACHED_DATA_DIR_NAME);
+ cachedDataPath = new Path(cachedDataPath, driverName);
+ LOG.info("Using cachedDataPath: " + cachedDataPath);
+ } else {
+ LOG.warn(
+            "Disabling attempted cache usage since metastore path may not be absolute, or depth is < 3. MetaStorePath={}",
+ metaStorePathString);
+ metaStorePath = null;
+ attemptingCacheUsage = false;
+ }
+
+ }
+ }
String execEngine = conf.get("hive.execution.engine");
conf.set("hive.execution.engine", "mr");
SessionState.start(conf);
conf.set("hive.execution.engine", execEngine);
- db = Hive.get(conf);
- drv = new Driver(conf);
- drv.init();
- pd = new ParseDriver();
- sem = new SemanticAnalyzer(queryState);
+
+ if (!attemptingCacheUsage) {
+ setupDbsEtc(true, true);
+ }
+ }
+
+ private void setupDbsEtc(boolean force, boolean isNewDb) throws HiveException, IOException {
+ if (!dbEtcSetup || force) {
+ if (isNewDb) {
+ // In case this is a new db, cleanup everything in case some entity created it.
+ Hive.closeCurrent();
+ cleanupMetastoreDir();
+ db = Hive.get(conf);
+ } else {
+ db = Hive.getWithFastCheck(conf, false);
+ }
+ LOG.info("Obtained db");
+ drv = new Driver(conf);
+ resetParser();
+ dbEtcSetup = true;
+ }
}
public void init(String tname) throws Exception {
@@ -947,8 +1161,9 @@ public void cliInit(String tname) throws Exception {
public String cliInit(String tname, boolean recreate) throws Exception {
if (recreate) {
cleanUp(tname);
- createSources(tname);
+ createSources(tname, true);
}
+ setupDbsEtc(false, true);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
diff --git pom.xml pom.xml
index 2971d63..5de7991 100644
--- pom.xml
+++ pom.xml
@@ -76,6 +76,7 @@
${maven.test.classpath}
file://
${project.build.directory}/tmp
+ ${project.build.directory}
file://${test.tmp.dir}
${project.build.directory}/warehouse
pfile://
@@ -1030,6 +1031,7 @@
${basedir}/${hive.path.to.root}/data/files
${test.tmp.dir}
${test.tmp.dir.uri}
+ ${test.build.dir}
${test.dfs.mkdir}
${test.output.overwrite}
${test.warehouse.scheme}${test.warehouse.dir}
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index ee6c564..6e57d84 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -63,6 +63,7 @@
import org.apache.hadoop.hive.common.HiveStatsUtils;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -263,7 +264,21 @@ public static Hive get(Configuration c, Class> clazz) throws HiveException {
*
*/
public static Hive get(HiveConf c) throws HiveException {
- return getInternal(c, false, false, true);
+ return getInternal(c, false, false, true, true);
+ }
+
+ /**
+ * Gets hive object for the current thread. If one is not initialized, then this method will
+ * return null.
+   * Otherwise this method behaves the same as get(HiveConf c)
+ *
+ * @param c new Hive Configuration
+ * @return Hive object for current thread
+ * @throws HiveException
+ */
+ @InterfaceAudience.Private
+ public static Hive getNoCreateIfNull(HiveConf c) throws HiveException {
+ return getInternal(c, false, false, true, false);
}
/**
@@ -279,17 +294,17 @@ public static Hive getWithFastCheck(HiveConf c) throws HiveException {
* MS client, assuming the relevant settings would be unchanged within the same conf object.
*/
public static Hive getWithFastCheck(HiveConf c, boolean doRegisterAllFns) throws HiveException {
- return getInternal(c, false, true, doRegisterAllFns);
+ return getInternal(c, false, true, doRegisterAllFns, true);
}
private static Hive getInternal(HiveConf c, boolean needsRefresh, boolean isFastCheck,
- boolean doRegisterAllFns) throws HiveException {
+ boolean doRegisterAllFns, boolean createIfNull) throws HiveException {
Hive db = hiveDB.get();
- if (db == null || !db.isCurrentUserOwner() || needsRefresh
+    if (db == null ? createIfNull : !db.isCurrentUserOwner() || needsRefresh
|| (c != null && db.metaStoreClient != null && !isCompatible(db, c, isFastCheck))) {
return create(c, false, db, doRegisterAllFns);
}
- if (c != null) {
+ if (c != null && db != null) {
db.conf = c;
}
return db;
@@ -329,7 +344,7 @@ public static Hive get() throws HiveException {
}
public static Hive get(boolean doRegisterAllFns) throws HiveException {
- return getInternal(null, false, false, doRegisterAllFns);
+ return getInternal(null, false, false, doRegisterAllFns, true);
}
/**
@@ -343,7 +358,7 @@ public static Hive get(boolean doRegisterAllFns) throws HiveException {
* @throws HiveException
*/
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
- return getInternal(c, needsRefresh, false, true);
+ return getInternal(c, needsRefresh, false, true, true);
}
public static void set(Hive hive) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 96c826b..4589617 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -1500,7 +1500,7 @@ public void close() throws IOException {
private void unCacheDataNucleusClassLoaders() {
try {
- Hive threadLocalHive = Hive.get(sessionConf);
+ Hive threadLocalHive = Hive.getNoCreateIfNull(sessionConf);
if ((threadLocalHive != null) && (threadLocalHive.getMSC() != null)
&& (threadLocalHive.getMSC().isLocalMetaStore())) {
if (sessionConf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL).equals(ObjectStore.class.getName())) {
diff --git ql/src/test/templates/TestCliDriver.vm ql/src/test/templates/TestCliDriver.vm
index 0ccedce..427d972 100644
--- ql/src/test/templates/TestCliDriver.vm
+++ ql/src/test/templates/TestCliDriver.vm
@@ -17,23 +17,34 @@
*/
package org.apache.hadoop.hive.cli;
+import com.google.common.base.Stopwatch;
import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import java.util.concurrent.TimeUnit;
+
public class $className {
+ private static final Logger LOG = LoggerFactory.getLogger(${className}.class);
+
private static final String HIVE_ROOT = QTestUtil.ensurePathEndsInSlash(System.getProperty("hive.root"));
private static QTestUtil qt;
static {
+ Stopwatch stopwatch = new Stopwatch().start();
+ String message = "Starting TestCliDriver run at " + System.currentTimeMillis();
+ LOG.info(message);
+ System.err.println(message);
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
String hiveConfDir = "$hiveConfDir";
String initScript = "$initScript";
@@ -44,15 +55,30 @@ public class $className {
if (!hiveConfDir.isEmpty()) {
hiveConfDir = HIVE_ROOT + hiveConfDir;
}
+ // TODO Is ZK startup required for TestCliDriver
+ // TODO Is LlapIo enabled required for TestCliDriver
qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
- hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true);
+ hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true, "$className",
+ $attemptCache);
+ message = "QTestUtil instance created. ElapsedTimeSinceStart=" + stopwatch.elapsed(
+ TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
// do a one time initialization
qt.cleanUp();
+ message = "Initialization cleanup done. ElapsedTimeSinceStart=" + stopwatch.elapsed(TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
+
qt.createSources();
+ message = "Initialization createSources done. ElapsedTimeSinceStart=" + stopwatch.elapsed(TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
} catch (Exception e) {
- System.err.println("Exception: " + e.getMessage());
+ System.err.println("Exception: " + e.getMessage() + ". ElapsedTimeSinceStart="
+ + stopwatch.elapsed(TimeUnit.MILLISECONDS));
e.printStackTrace();
System.err.flush();
fail("Unexpected exception in static initialization: "+e.getMessage());
@@ -62,6 +88,7 @@ public class $className {
@Before
public void setUp() {
try {
+      // TODO This restarts ZK for each test. Is that required?
qt.clearTestSideEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
@@ -113,7 +140,9 @@ public class $className {
private void runTest(String tname, String fname, String fpath) throws Exception {
long startTime = System.currentTimeMillis();
try {
- System.err.println("Begin query: " + fname);
+ String message = "Begin query: " + fname + ", startTime=" + startTime;
+ System.err.println(message);
+ LOG.info(message);
qt.addFile(fpath);
@@ -137,7 +166,9 @@ public class $className {
}
long elapsedTime = System.currentTimeMillis() - startTime;
- System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s");
+ String message = "Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s";
+ System.err.println(message);
+ LOG.info(message);
assertTrue("Test passed", true);
}
}