diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index ef3d8f8..459f567 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -21,6 +21,7 @@
 import java.util.Arrays;
 import java.util.Map;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobStatus;
@@ -146,11 +147,14 @@ public int startMonitor() {
         }
 
         if (!done) {
+          // break the loop if remote context dies while job hasn't finished
+          Preconditions.checkState(sparkJobStatus.isRemoteActive(),
+              "Remote context becomes inactive.");
           Thread.sleep(checkInterval);
         }
       } catch (Exception e) {
         String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
-        msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;
+        msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
 
         // Has to use full name to make sure it does not conflict with
         // org.apache.commons.lang.StringUtils
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index 0e3e541..951dbb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -151,6 +151,14 @@ public void setError(Throwable e) {
     this.error = e;
   }
 
+  /**
+   * Indicates whether the remote context is active. SparkJobMonitor can use this to decide whether
+   * to stop monitoring.
+   */
+  public boolean isRemoteActive() {
+    return sparkClient.isActive();
+  }
+
   private SparkJobInfo getSparkJobInfo() throws HiveException {
     Integer sparkJobId = jobHandle.getSparkJobIds().size() == 1 ?
         jobHandle.getSparkJobIds().get(0) : null;
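
A note on the control flow this patch creates: Preconditions.checkState throws an
IllegalStateException when its condition is false, so when the remote context dies the new
guard breaks out of the monitor loop and lands in the existing catch (Exception e) block,
which reports the failure via the "Failed to monitor Job[...]" message. Below is a minimal,
self-contained sketch of that behavior; FakeJobStatus and its fields are hypothetical
stand-ins for RemoteSparkJobStatus (whose real isRemoteActive() delegates to
sparkClient.isActive()), and checkInterval is hard-coded.

import com.google.common.base.Preconditions;

public class MonitorLoopSketch {
  // Hypothetical stand-in for RemoteSparkJobStatus.
  static class FakeJobStatus {
    boolean remoteActive = false; // simulate a remote context that has died
    int getJobId() { return 42; }
    boolean isRemoteActive() { return remoteActive; }
  }

  public static void main(String[] args) {
    FakeJobStatus sparkJobStatus = new FakeJobStatus();
    long checkInterval = 1000L; // stands in for the configured check interval
    try {
      // Mirrors the added guard: throws IllegalStateException (with the
      // given message) when the remote context is no longer active.
      Preconditions.checkState(sparkJobStatus.isRemoteActive(),
          "Remote context becomes inactive.");
      Thread.sleep(checkInterval);
    } catch (Exception e) {
      // Mirrors the existing catch block in startMonitor(): a dead remote
      // context surfaces as a monitoring failure for the job.
      String msg = " with exception '" + e.getMessage() + "'";
      msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
      System.err.println(msg);
    }
  }
}

Running the sketch prints: Failed to monitor Job[42] with exception 'Remote context becomes
inactive.' The checkState call folds the liveness check and the loop exit into a single
statement, at the cost of reporting a dead context as a generic monitoring exception rather
than as a distinct error state.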