diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 8baf309e7f..bc2cffaf62 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -585,6 +585,8 @@
       "Cannot create Spark client on a closed session {0}", true),
   SPARK_JOB_INTERRUPTED(30044, "Spark job was interrupted while executing"),
+  SPARK_GET_JOB_INFO_INTERRUPTED(30045, "Spark job was interrupted while getting job info"),
+  SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info due to exception {0}"),
 
 
   //========================== 40000 range starts here ========================//
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index d2e28b02ba..832832b325 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -25,6 +25,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Throwables;
+
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder;
@@ -37,7 +39,6 @@
 import org.apache.hive.spark.client.JobHandle;
 import org.apache.hive.spark.client.SparkClient;
 import org.apache.hive.spark.counter.SparkCounters;
-
 import org.apache.spark.JobExecutionStatus;
 import org.apache.spark.SparkJobInfo;
 import org.apache.spark.SparkStageInfo;
@@ -47,8 +48,10 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 /**
  * Used with remove spark client.
@@ -198,10 +201,16 @@ private SparkJobInfo getSparkJobInfo() throws HiveException {
         new GetJobInfoJob(jobHandle.getClientJobId(), sparkJobId));
     try {
       return getJobInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
-    } catch (Exception e) {
-      LOG.warn("Failed to get job info.", e);
+    } catch (TimeoutException e) {
       throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_TIMEOUT,
           Long.toString(sparkClientTimeoutInSeconds));
+    } catch (InterruptedException e) {
+      // Restore the interrupt flag so callers further up the stack can observe the interruption.
+      Thread.currentThread().interrupt();
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_INTERRUPTED);
+    } catch (ExecutionException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_EXECUTIONERROR,
+          Throwables.getRootCause(e).getMessage());
     }
   }
 