diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f52350d..f6081f4 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -913,13 +913,15 @@ public void setSparkSession(SparkSession sparkSession) {
       long endTime = System.currentTimeMillis() + 240000;
       while (sparkSession.getMemoryAndCores().getSecond() <= 1) {
         if (System.currentTimeMillis() >= endTime) {
-          LOG.error("Timed out waiting for Spark cluster to init");
-          break;
+          String msg = "Timed out waiting for Spark cluster to init";
+          throw new IllegalStateException(msg);
         }
         Thread.sleep(100);
       }
     } catch (Exception e) {
-      LOG.error(e);
+      String msg = "Error trying to obtain executor info: " + e;
+      LOG.error(msg, e);
+      throw new IllegalStateException(msg, e);
     }
   }
 }
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 8e17790..bb6a934 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -438,6 +438,15 @@ public MiniSparkShim(Configuration conf, int numberOfTaskTrackers,
       mr = new MiniSparkOnYARNCluster("sparkOnYarn");
       conf.set("fs.defaultFS", nameNode);
+      String user;
+      try {
+        user = Utils.getUGI().getShortUserName();
+      } catch (Exception e) {
+        String msg = "Cannot obtain username: " + e;
+        throw new IllegalStateException(msg, e);
+      }
+      conf.set("hadoop.proxyuser." + user + ".groups", "*");
+      conf.set("hadoop.proxyuser." + user + ".hosts", "*");
       conf.set("yarn.resourcemanager.scheduler.class",
           "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler");
       mr.init(conf);
       mr.start();
@@ -493,6 +502,15 @@ public void setupConfiguration(Configuration conf) {
       int numDataNodes,
       boolean format,
       String[] racks) throws IOException {
+    String user;
+    try {
+      user = Utils.getUGI().getShortUserName();
+    } catch (Exception e) {
+      String msg = "Cannot obtain username: " + e;
+      throw new IllegalStateException(msg, e);
+    }
+    conf.set("hadoop.proxyuser." + user + ".groups", "*");
+    conf.set("hadoop.proxyuser." + user + ".hosts", "*");
     MiniDFSCluster miniDFSCluster = new MiniDFSCluster(conf, numDataNodes,
         format, racks);
     // Need to set the client's KeyProvider to the NN's for JKS,
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
index c2ac0c2..4e15902 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
@@ -166,7 +166,7 @@ public void rpcClosed(Rpc rpc) {
           jcLock.notifyAll();
         }
       } catch (Exception e) {
-        LOG.error("Failed to start SparkContext.", e);
+        LOG.error("Failed to start SparkContext: " + e, e);
         shutdown(e);
         synchronized (jcLock) {
           jcLock.notifyAll();
@@ -203,7 +203,11 @@ private void submit(JobWrapper job) {
 
   private synchronized void shutdown(Throwable error) {
     if (running) {
-      LOG.info("Shutting down remote driver.");
+      if (error == null) {
+        LOG.info("Shutting down remote driver.");
+      } else {
+        LOG.error("Shutting down remote driver due to error: " + error, error);
+      }
       running = false;
       for (JobWrapper job : activeJobs.values()) {
         cancelJob(job);
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index 9f9a1c1..ba08106 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -47,6 +47,7 @@
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hive.spark.client.rpc.Rpc;
 import org.apache.hive.spark.client.rpc.RpcConfiguration;
 import org.apache.hive.spark.client.rpc.RpcServer;
@@ -350,6 +351,16 @@ public void run() {
         }
       }
 
+      if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
+        argv.add("--proxy-user");
+        try {
+          argv.add(Utils.getUGI().getShortUserName());
+        } catch (Exception e) {
+          String msg = "Cannot obtain username: " + e;
+          throw new IllegalStateException(msg, e);
+        }
+      }
+
       argv.add("--properties-file");
       argv.add(properties.getAbsolutePath());
       argv.add("--class");