commit ffd694a2e4d346f14f6efe9b1f9484cea1a8de0c
Author: Sahil Takiar
Date:   Fri Feb 9 16:08:25 2018 -0800

    HIVE-18672: Printed state in RemoteSparkJobMonitor is ambiguous

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index 22f70243e6..3856518e76 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -62,10 +62,11 @@ public int startMonitor() {
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_SUBMIT_TO_RUNNING);
     startTime = System.currentTimeMillis();
 
+    JobHandle.State state = null;
     while (true) {
       try {
-        JobHandle.State state = sparkJobStatus.getRemoteJobState();
+        state = sparkJobStatus.getRemoteJobState();
         Preconditions.checkState(sparkJobStatus.isRemoteActive(),
             "Connection to remote Spark driver was lost");
 
         switch (state) {
@@ -76,14 +77,13 @@ public int startMonitor() {
             HiveException he = new HiveException(ErrorMsg.SPARK_JOB_MONITOR_TIMEOUT,
                 Long.toString(timeCount));
             console.printError(he.getMessage());
-            console.printError("Status: " + state);
             sparkJobStatus.setError(he);
             running = false;
             done = true;
             rc = 2;
           }
           if (LOG.isDebugEnabled()) {
-            console.printInfo("state = " + state);
+            console.printInfo("Spark job[" + sparkJobStatus.getJobId() + "] state = " + state);
           }
           break;
         case STARTED:
@@ -186,7 +186,8 @@ public int startMonitor() {
        finalException = new HiveException(e, ErrorMsg.SPARK_JOB_INTERRUPTED);
        LOG.warn("Interrupted while monitoring the Hive on Spark application, exiting");
      } else {
-        String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
+        String msg = " with exception '" + Utilities.getNameMessage(e) + "' Last known state = " +
+            (state != null ? state.name() : "UNKNOWN");
        msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;

        // Has to use full name to make sure it does not conflict with
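
The core of the patch is hoisting the loop-local state variable so the exception handler can report the last state it observed before monitoring failed. Below is a minimal, self-contained sketch of that pattern, assuming nothing from Hive itself: the State enum and pollRemoteState() are hypothetical stand-ins for JobHandle.State and sparkJobStatus.getRemoteJobState(), and the message format only mirrors the patched error text.

public class LastKnownStateExample {

  enum State { QUEUED, STARTED, SUCCEEDED, FAILED }

  public static void main(String[] args) {
    // Declared outside the loop so it is still in scope inside the catch block.
    State state = null;
    try {
      for (int attempt = 0; attempt < 3; attempt++) {
        state = pollRemoteState(attempt);   // may throw when the connection is lost
      }
    } catch (RuntimeException e) {
      // With the hoisted variable, the failure message can include the last observed
      // state instead of just the exception text.
      System.err.println("Failed to monitor job with exception '" + e.getMessage()
          + "' Last known state = " + (state != null ? state.name() : "UNKNOWN"));
    }
  }

  // Hypothetical poller: succeeds twice, then simulates a lost connection.
  private static State pollRemoteState(int attempt) {
    if (attempt == 2) {
      throw new RuntimeException("Connection to remote Spark driver was lost");
    }
    return attempt == 0 ? State.QUEUED : State.STARTED;
  }
}

Running the sketch prints a message ending in "Last known state = STARTED", which is the kind of context the patched RemoteSparkJobMonitor adds to its monitoring failures.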