diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java
index 6207d32026..2699154cc0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/UtilsForTest.java
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hive;
 
+import java.util.Iterator;
+import java.util.Map;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 
@@ -38,4 +41,20 @@ public static void setNewDerbyDbLocation(HiveConf conf, String newloc) {
         + ";create=true");
   }
 
+  /**
+   * Does variable expansion by calling "set" on each configuration entry.
+   * When MR jobs are run, under some circumstances they fail because
+   * variable expansion fails after changes in Hadoop that prevent variable
+   * expansion for the JobHistoryServer. Expanding the values ahead of time
+   * ensures that variables like {test.tmp.dir} get expanded.
+   * @param hiveConf the configuration whose values are expanded in place
+   */
+  public static void expandHiveConfParams(HiveConf hiveConf) {
+    Iterator<Map.Entry<String, String>> iter = hiveConf.iterator();
+    while (iter.hasNext()) {
+      String key = iter.next().getKey();
+      hiveConf.set(key, hiveConf.get(key));
+    }
+  }
+
 }
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
index 339c4aebab..819838d091 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
@@ -18,6 +18,7 @@
 package org.apache.hive.service.cli;
 
+import org.apache.hadoop.hive.UtilsForTest;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
@@ -39,6 +40,7 @@ public static void setUpBeforeClass() throws Exception {
     HiveConf conf = new HiveConf();
     conf.setBoolean("datanucleus.schema.autoCreateTables", true);
     conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
+    UtilsForTest.expandHiveConfParams(conf);
     service.init(conf);
     client = new ThriftCLIServiceClient(service);
   }
diff --git a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
index e4ac0a927e..e5b100c549 100644
--- a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
+++ b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -22,6 +22,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
@@ -178,6 +179,11 @@ public MiniHS2 build() throws Exception {
       if (miniClusterType == MiniClusterType.MR && useMiniKdc) {
         throw new IOException("Can't create secure miniMr ... yet");
       }
+      Iterator<Map.Entry<String, String>> iter = hiveConf.iterator();
+      while (iter.hasNext()) {
+        String key = iter.next().getKey();
+        hiveConf.set(key, hiveConf.get(key));
+      }
       if (isHTTPTransMode) {
         hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_HTTP_MODE);
       } else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 1de782a756..01dd93c527 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -27,9 +27,7 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.lang.StringUtils;
@@ -175,12 +173,6 @@ public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext
       CompilationOpContext opContext) {
     super.initialize(queryState, queryPlan, driverContext, opContext);
 
-    Iterator<Map.Entry<String, String>> iter = conf.iterator();
-    while(iter.hasNext()) {
-      String key = iter.next().getKey();
-      conf.set(key, conf.get(key));
-    }
-
     job = new JobConf(conf, ExecDriver.class);
 
     initializeFiles("tmpjars", getResource(conf, SessionState.ResourceType.JAR));