diff --git ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
index f372d7c..0989d65 100644
--- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
+++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -153,6 +153,8 @@ public boolean accept(File filePath) {
private String useHBaseMetastore;
+ private String attemptCache;
+
public void setHadoopVersion(String ver) {
this.hadoopVersion = ver;
}
@@ -233,6 +235,14 @@ public void setUseHBaseMetastore(String useHBaseMetastore) {
this.useHBaseMetastore = useHBaseMetastore;
}
+ public String getAttemptCache() {
+ return attemptCache;
+ }
+
+ public void setAttemptCache(String attemptCache) {
+ this.attemptCache = attemptCache;
+ }
+
public void setHiveRootDirectory(File hiveRootDirectory) {
try {
this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
@@ -543,6 +553,7 @@ public void execute() throws BuildException {
ctx.put("initScript", initScript);
ctx.put("cleanupScript", cleanupScript);
ctx.put("useHBaseMetastore", useHBaseMetastore);
+ ctx.put("attemptCache", attemptCache);
File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
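
For orientation (not part of the patch): QTestGenTask renders the Velocity template with this context, so the String stored under the "attemptCache" key replaces $attemptCache verbatim in the generated TestCliDriver.java. A minimal, self-contained sketch of that substitution; the class name and template string below are illustrative:

    import java.io.StringWriter;
    import org.apache.velocity.VelocityContext;
    import org.apache.velocity.app.VelocityEngine;

    public class AttemptCacheContextDemo {
      public static void main(String[] args) throws Exception {
        VelocityEngine ve = new VelocityEngine();
        ve.init();

        VelocityContext ctx = new VelocityContext();
        ctx.put("attemptCache", "true"); // same key the task puts into its context

        // $attemptCache expands to the literal true/false, which then compiles
        // as a boolean in the generated test class.
        StringWriter out = new StringWriter();
        ve.evaluate(ctx, out, "demo", "boolean attemptCache = $attemptCache;");
        System.out.println(out); // -> boolean attemptCache = true;
      }
    }
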
diff --git itests/qtest-spark/pom.xml itests/qtest-spark/pom.xml
index 6900e11..d6d23c7 100644
--- itests/qtest-spark/pom.xml
+++ itests/qtest-spark/pom.xml
@@ -397,6 +397,7 @@
logFile="${project.build.directory}/testsparkclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/spark"
initScript="q_test_init.sql"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
@@ -415,6 +416,7 @@
logFile="${project.build.directory}/testminisparkonyarnclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/spark"
initScript="q_test_init.sql"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
diff --git itests/qtest/pom.xml itests/qtest/pom.xml
index 53b8549..6845063 100644
--- itests/qtest/pom.xml
+++ itests/qtest/pom.xml
@@ -440,6 +440,7 @@
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="true"
cleanupScript="q_test_cleanup.sql"/>
@@ -509,6 +510,7 @@
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
@@ -661,6 +666,7 @@
logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
hadoopVersion="${hadoop.version}"
initScript="${initScript}"
+ attemptCache="false"
cleanupScript="q_test_cleanup.sql"/>
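
Both poms set the new attribute the same way, and it reaches the task through Ant's usual attribute binding: Ant resolves each XML attribute on the <qtestgen> element to the matching bean setter by name. A minimal sketch using only the accessors added in this patch (the wrapper class is illustrative):

    import org.apache.hadoop.hive.ant.QTestGenTask;

    public class AttemptCacheWiringDemo {
      public static void main(String[] args) {
        // Equivalent of attemptCache="false" on the <qtestgen> element.
        QTestGenTask task = new QTestGenTask();
        task.setAttemptCache("false");
        System.out.println("attemptCache=" + task.getAttemptCache());
      }
    }

The value stays a String end to end; it only becomes a boolean when the template emits it as a Java literal.
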
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+ if (metaStorePath.isAbsolute() && metaStorePath.depth() >= 3) {
+ // Turn this on only if the path is absolute, and is at least 3 deep - since we'll be deleting files later.
+ localFs = FileSystem.getLocal(conf).getRaw();
+ assert(TEST_BUILD_DIR != null);
+ cachedDataPath = new Path(TEST_BUILD_DIR, CACHED_DATA_DIR_NAME);
+ cachedDataPath = new Path(cachedDataPath, driverName);
+ LOG.info("Using cachedDataPath: " + cachedDataPath);
+ } else {
+ LOG.warn(
+ "Disableing attempted cache usage since metastore path may not be absolute, or depth is < 3. MetaStorePath={}",
+ metaStorePathString);
+ metaStorePath = null;
+ attemptingCacheUsage = false;
+ }
+
+ }
+ }
String execEngine = conf.get("hive.execution.engine");
conf.set("hive.execution.engine", "mr");
SessionState.start(conf);
conf.set("hive.execution.engine", execEngine);
- db = Hive.get(conf);
- drv = new Driver(conf);
- drv.init();
- pd = new ParseDriver();
- sem = new SemanticAnalyzer(conf);
+
+ if (!attemptingCacheUsage) {
+ setupDbsEtc(true, true);
+ }
+ }
+
+ private void setupDbsEtc(boolean force, boolean isNewDb) throws HiveException {
+ if (!dbEtcSetup || force) {
+ if (isNewDb) {
+ db = Hive.get(conf);
+ } else {
+ db = Hive.getWithFastCheck(conf, false);
+ }
+ LOG.info("Obtained db");
+ drv = new Driver(conf);
+ drv.init();
+ pd = new ParseDriver();
+ sem = new SemanticAnalyzer(conf);
+ dbEtcSetup = true;
+ }
}
public void init(String tname) throws Exception {
@@ -944,8 +1155,9 @@ public void cliInit(String tname) throws Exception {
public String cliInit(String tname, boolean recreate) throws Exception {
if (recreate) {
cleanUp(tname);
- createSources(tname);
+ createSources(tname, true);
}
+ setupDbsEtc(false, true);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
diff --git pom.xml pom.xml
index 97eb36d..83c5b54 100644
--- pom.xml
+++ pom.xml
@@ -76,6 +76,7 @@
${maven.test.classpath}
file://
<test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
+ <test.build.dir>${project.build.directory}</test.build.dir>
<test.tmp.dir.uri>file://${test.tmp.dir}</test.tmp.dir.uri>
<test.warehouse.dir>${project.build.directory}/warehouse</test.warehouse.dir>
<test.warehouse.scheme>pfile://</test.warehouse.scheme>
@@ -1027,6 +1028,7 @@
<test.data.files>${basedir}/${hive.path.to.root}/data/files</test.data.files>
<test.tmp.dir>${test.tmp.dir}</test.tmp.dir>
<test.tmp.dir.uri>${test.tmp.dir.uri}</test.tmp.dir.uri>
+ <test.build.dir>${test.build.dir}</test.build.dir>
<test.dfs.mkdir>${test.dfs.mkdir}</test.dfs.mkdir>
<test.output.overwrite>${test.output.overwrite}</test.output.overwrite>
<test.warehouse.dir>${test.warehouse.scheme}${test.warehouse.dir}</test.warehouse.dir>
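
These two hunks wire ${project.build.directory} through as a test.build.dir system property for forked test JVMs. QTestUtil's TEST_BUILD_DIR (asserted non-null in the earlier hunk) is presumably populated from it, mirroring how test.tmp.dir and friends are consumed; a minimal sketch of that assumption:

    public class TestBuildDirProbe {
      // Assumption: this is how QTestUtil's TEST_BUILD_DIR gets its value.
      static final String TEST_BUILD_DIR = System.getProperty("test.build.dir");

      public static void main(String[] args) {
        // Run with: java -Dtest.build.dir=/path/to/target TestBuildDirProbe
        System.out.println("test.build.dir=" + TEST_BUILD_DIR);
      }
    }
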
diff --git ql/src/test/templates/TestCliDriver.vm ql/src/test/templates/TestCliDriver.vm
index 72cfab9..de82726 100644
--- ql/src/test/templates/TestCliDriver.vm
+++ ql/src/test/templates/TestCliDriver.vm
@@ -17,23 +17,34 @@
*/
package org.apache.hadoop.hive.cli;
+import com.google.common.base.Stopwatch;
import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import java.util.concurrent.TimeUnit;
+
public class $className {
+ private static final Logger LOG = LoggerFactory.getLogger(${className}.class);
+
private static final String HIVE_ROOT = QTestUtil.ensurePathEndsInSlash(System.getProperty("hive.root"));
private static QTestUtil qt;
static {
+ Stopwatch stopwatch = new Stopwatch().start();
+ String message = "Starting TestCliDriver run at " + System.currentTimeMillis();
+ LOG.info(message);
+ System.err.println(message);
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
String hiveConfDir = "$hiveConfDir";
String initScript = "$initScript";
@@ -44,15 +55,30 @@ public class $className {
if (!hiveConfDir.isEmpty()) {
hiveConfDir = HIVE_ROOT + hiveConfDir;
}
+ // TODO Is ZK startup required for TestCliDriver?
+ // TODO Is enabling LLAP IO required for TestCliDriver?
qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
- hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true);
+ hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true, "$className",
+ $attemptCache);
+ message = "QTestUtil instance created. ElapsedTimeSinceStart=" + stopwatch.elapsed(
+ TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
// do a one time initialization
qt.cleanUp();
+ message = "Initialization cleanup done. ElapsedTimeSinceStart=" + stopwatch.elapsed(TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
+
qt.createSources();
+ message = "Initialization createSources done. ElapsedTimeSinceStart=" + stopwatch.elapsed(TimeUnit.MILLISECONDS);
+ LOG.info(message);
+ System.err.println(message);
} catch (Exception e) {
- System.err.println("Exception: " + e.getMessage());
+ System.err.println("Exception: " + e.getMessage() + ". ElapsedTimeSinceStart="
+ + stopwatch.elapsed(TimeUnit.MILLISECONDS));
e.printStackTrace();
System.err.flush();
fail("Unexpected exception in static initialization: "+e.getMessage());
@@ -62,6 +88,7 @@ public class $className {
@Before
public void setUp() {
try {
+ // TODO This restarts ZK for each test. Is that required?
qt.clearTestSideEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
@@ -113,7 +140,9 @@ public class $className {
private void runTest(String tname, String fname, String fpath) throws Exception {
long startTime = System.currentTimeMillis();
try {
- System.err.println("Begin query: " + fname);
+ String message = "Begin query: " + fname + ", startTime=" + startTime;
+ System.err.println(message);
+ LOG.info(message);
qt.addFile(fpath);
@@ -136,7 +165,9 @@ public class $className {
}
long elapsedTime = System.currentTimeMillis() - startTime;
- System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s");
+ String message = "Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s";
+ System.err.println(message);
+ LOG.info(message);
assertTrue("Test passed", true);
}
}
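
The timing instrumentation above follows the standard Guava Stopwatch pattern; for reference, a minimal standalone version (note that on current Guava, Stopwatch.createStarted() replaces the deprecated new Stopwatch().start() form the template uses):

    import com.google.common.base.Stopwatch;
    import java.util.concurrent.TimeUnit;

    public class InitTimingDemo {
      public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        Thread.sleep(50); // stand-in for qt.cleanUp() / qt.createSources()
        String message = "Initialization done. ElapsedTimeSinceStart="
            + stopwatch.elapsed(TimeUnit.MILLISECONDS);
        System.err.println(message);
      }
    }
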