diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index ef3d8f8..dd73f3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -21,6 +21,7 @@
 import java.util.Arrays;
 import java.util.Map;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobStatus;
@@ -103,6 +104,10 @@ public int startMonitor() {
 
             printStatus(progressMap, lastProgressMap);
             lastProgressMap = progressMap;
+          } else if (sparkJobState == null) {
+            // in case the remote context crashes between JobStarted and JobSubmitted
+            Preconditions.checkState(sparkJobStatus.isRemoteActive(),
+                "Remote context becomes inactive.");
           }
           break;
         case SUCCEEDED:
@@ -150,7 +155,7 @@ public int startMonitor() {
         }
       } catch (Exception e) {
         String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
-        msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;
+        msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
 
         // Has to use full name to make sure it does not conflict with
         // org.apache.commons.lang.StringUtils
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index 0e3e541..951dbb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -151,6 +151,14 @@ public void setError(Throwable e) {
     this.error = e;
   }
 
+  /**
+   * Indicates whether the remote context is active. SparkJobMonitor can use this to decide whether
+   * to stop monitoring.
+   */
+  public boolean isRemoteActive() {
+    return sparkClient.isActive();
+  }
+
   private SparkJobInfo getSparkJobInfo() throws HiveException {
     Integer sparkJobId = jobHandle.getSparkJobIds().size() == 1 ?
         jobHandle.getSparkJobIds().get(0) : null;
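
Editor's note (not part of the patch): if the remote context dies between the JobStarted and JobSubmitted events, sparkJobStatus.getState() stays null indefinitely, so without the new guard the monitor loop would spin forever in the STARTED case. Guava's Preconditions.checkState throws an IllegalStateException when its condition is false; that exception is caught by the catch (Exception e) block shown in the third hunk, which converts it into the "Failed to monitor Job[...]" error and ends monitoring. Below is a minimal, self-contained sketch of the same fail-fast polling pattern, using a hypothetical RemoteJob interface rather than Hive's actual API:

    import com.google.common.base.Preconditions;

    // Hypothetical types for illustration only; not Hive's API.
    public class PollingGuardExample {
      interface RemoteJob {
        String state();     // stays null if the remote side died before submitting
        boolean isAlive();  // whether the remote process is still running
      }

      static void monitor(RemoteJob job) throws InterruptedException {
        while (true) {
          String state = job.state();
          if ("SUCCEEDED".equals(state) || "FAILED".equals(state)) {
            return; // terminal state reached, stop polling
          }
          if (state == null) {
            // Fail fast instead of polling a dead remote forever:
            // checkState throws IllegalStateException when isAlive() is false.
            Preconditions.checkState(job.isAlive(), "Remote context becomes inactive.");
          }
          Thread.sleep(1000); // poll interval
        }
      }
    }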