commit e9bab7d62083c2092b9b6fd8777ecb538e2c0fd7
Author: Sahil Takiar
Date:   Fri Jan 5 17:54:17 2018 -0800

    HIVE-18389: Print out Spark Web UI URL to the console log

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index d9d4b372b0..a435ed2f15 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -101,7 +101,8 @@
   }
 
   private void createRemoteClient() throws Exception {
-    remoteClient = SparkClientFactory.createClient(conf, hiveConf, sessionId);
+    remoteClient = SparkClientFactory.createClient(conf, hiveConf, sessionId,
+        SessionState.LogHelper.getInfoStream());
 
     if (HiveConf.getBoolVar(hiveConf, ConfVars.HIVE_PREWARM_ENABLED) &&
         (SparkClientUtilities.isYarnMaster(hiveConf.get("spark.master")) ||
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/BaseProtocol.java b/spark-client/src/main/java/org/apache/hive/spark/client/BaseProtocol.java
index 093032525a..729080936b 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/BaseProtocol.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/BaseProtocol.java
@@ -166,4 +166,16 @@
   }
 
 
+  protected static class SparkUIWebURL implements Serializable {
+
+    final String UIWebURL;
+
+    SparkUIWebURL(String UIWebURL) {
+      this.UIWebURL = UIWebURL;
+    }
+
+    SparkUIWebURL() {
+      this(null);
+    }
+  }
 }
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
index ede8ce9e40..66cf33942c 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
@@ -25,6 +25,8 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
+import java.net.InetAddress;
+import java.net.URI;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -169,6 +171,14 @@ public void rpcClosed(Rpc rpc) {
         throw e;
       }
 
+      if (jc.sc().sc().uiWebUrl().isDefined()) {
+        // Run a reverse DNS lookup on the URL
+        URI uri = URI.create(jc.sc().sc().uiWebUrl().get());
+        InetAddress address = InetAddress.getByName(uri.getHost());
+        this.protocol.sendUIWebURL(uri.getScheme() + "://" + address.getCanonicalHostName() + ":" +
+                uri.getPort());
+      }
+
       synchronized (jcLock) {
         for (Iterator<JobWrapper<?>> it = jobQueue.iterator(); it.hasNext();) {
           it.next().submit();
@@ -270,6 +280,11 @@ void sendMetrics(String jobId, int sparkJobId, int stageId, long taskId, Metrics
       clientRpc.call(new JobMetrics(jobId, sparkJobId, stageId, taskId, metrics));
     }
 
+    void sendUIWebURL(String UIWebURL) {
+      LOG.debug("Send UIWebURL({}) to Client.", UIWebURL);
+      clientRpc.call(new SparkUIWebURL(UIWebURL));
+    }
+
     private void handle(ChannelHandlerContext ctx, CancelJob msg) {
       JobWrapper<?> job = activeJobs.get(msg.id);
       if (job == null || !cancelJob(job)) {
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java
index 50c7bb20c4..c50d84828b 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java
@@ -18,6 +18,7 @@
 package org.apache.hive.spark.client;
 
 import java.io.IOException;
+import java.io.PrintStream;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -83,10 +84,11 @@ public static void stop() {
    *
    * @param sparkConf Configuration for the remote Spark application, contains spark.* properties.
    * @param hiveConf Configuration for Hive, contains hive.* properties.
    */
-  public static SparkClient createClient(Map<String, String> sparkConf, HiveConf hiveConf, String sessionId)
-      throws IOException, SparkException {
+  public static SparkClient createClient(Map<String, String> sparkConf, HiveConf hiveConf,
+      String sessionId, PrintStream consoleStream)
+      throws IOException, SparkException {
     Preconditions.checkState(server != null, "initialize() not called.");
-    return new SparkClientImpl(server, sparkConf, hiveConf, sessionId);
+    return new SparkClientImpl(server, sparkConf, hiveConf, sessionId, consoleStream);
   }
 }
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index 49b7deb5ee..4713e7d770 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -38,6 +38,7 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
+import java.io.PrintStream;
 import java.io.Serializable;
 import java.io.Writer;
 import java.net.URI;
@@ -88,13 +89,15 @@
   private final Map<String, JobHandleImpl<?>> jobs;
   private final Rpc driverRpc;
   private final ClientProtocol protocol;
+  private final PrintStream consoleStream;
   private volatile boolean isAlive;
 
   SparkClientImpl(RpcServer rpcServer, Map<String, String> conf, HiveConf hiveConf,
-      String sessionid) throws IOException, SparkException {
+      String sessionid, PrintStream consoleStream) throws IOException {
     this.conf = conf;
     this.hiveConf = hiveConf;
     this.jobs = Maps.newConcurrentMap();
+    this.consoleStream = consoleStream;
 
     String secret = rpcServer.createSecret();
     this.driverThread = startDriver(rpcServer, sessionid, secret);
@@ -651,6 +654,11 @@ private void handle(ChannelHandlerContext ctx, JobSubmitted msg) {
       }
     }
 
+    private void handle(ChannelHandlerContext ctx, SparkUIWebURL msg) {
+      String printMsg = "Hive on Spark Session Web UI URL: " + msg.UIWebURL;
+      consoleStream.println(printMsg);
+      LOG.info(printMsg);
+    }
   }
 
   private static class AddJarJob implements Job<Serializable> {
diff --git a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
index 697d8d144d..dd20af2d3d 100644
--- a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
+++ b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
@@ -34,6 +34,7 @@
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.InputStream;
+import java.io.PrintStream;
 import java.io.Serializable;
 import java.net.URI;
 import java.util.Arrays;
@@ -314,7 +315,8 @@ private void runTest(boolean local, TestFunction test) throws Exception {
     SparkClient client = null;
     try {
       test.config(conf);
-      client = SparkClientFactory.createClient(conf, HIVECONF, UUID.randomUUID().toString());
+      client = SparkClientFactory.createClient(conf, HIVECONF, UUID.randomUUID().toString(),
+          mock(PrintStream.class));
       test.call(client);
     } finally {
       if (client != null) {