diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 39d5d9e..9b45e7c 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -858,7 +858,12 @@ public String cliInit(String tname, boolean recreate) throws Exception {
     HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
     Utilities.clearWorkMap();
-    CliSessionState ss = createSessionState();
+    CliSessionState ss;
+    if (recreate || !(clusterType == MiniClusterType.tez)) {
+      ss = createSessionState();
+    } else {
+      ss = (CliSessionState) SessionState.get();
+    }
     assert ss != null;
     ss.in = System.in;
@@ -885,20 +890,22 @@ public String cliInit(String tname, boolean recreate) throws Exception {
     }
     ss.err = new CachingPrintStream(fo, true, "UTF-8");
     ss.setIsSilent(true);
-    SessionState oldSs = SessionState.get();
-
-    if (oldSs != null && (clusterType == MiniClusterType.tez || clusterType == MiniClusterType.spark
-        || clusterType == MiniClusterType.miniSparkOnYarn)) {
-      sparkSession = oldSs.getSparkSession();
-      ss.setSparkSession(sparkSession);
-      oldSs.setSparkSession(null);
-      oldSs.close();
-    }
+    if (recreate || !(clusterType == MiniClusterType.tez)) {
+      SessionState oldSs = SessionState.get();
+
+      if (oldSs != null && (clusterType == MiniClusterType.tez || clusterType == MiniClusterType.spark
+          || clusterType == MiniClusterType.miniSparkOnYarn)) {
+        sparkSession = oldSs.getSparkSession();
+        ss.setSparkSession(sparkSession);
+        oldSs.setSparkSession(null);
+        oldSs.close();
+      }

-    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
-      oldSs.out.close();
+      if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
+        oldSs.out.close();
+      }
+      SessionState.start(ss);
     }
-    SessionState.start(ss);

     cliDriver = new CliDriver();