diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index 8baf309e7f..1473a79f77 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -585,6 +585,8 @@ "Cannot create Spark client on a closed session {0}", true), SPARK_JOB_INTERRUPTED(30044, "Spark job was interrupted while executing"), + SPARK_GET_JOB_INFO_INTERRUPTED(30045, "Spark job was interrupted while getting job info"), + SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info"), //========================== 40000 range starts here ========================// diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java index e4a53fb134..fe526376ec 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java @@ -46,8 +46,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; /** * Used with remove spark client. 
@@ -197,10 +199,19 @@ private SparkJobInfo getSparkJobInfo() throws HiveException {
         new GetJobInfoJob(jobHandle.getClientJobId(), sparkJobId));
     try {
       return getJobInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
-    } catch (Exception e) {
+    } catch (TimeoutException e) {
       LOG.warn("Failed to get job info.", e);
       throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_TIMEOUT,
           Long.toString(sparkClientTimeoutInSeconds));
+    } catch (InterruptedException e) {
+      LOG.warn("Failed to get job info.", e);
+      // Restore the interrupt flag before translating the exception, so
+      // callers further up the stack can still observe the interruption.
+      Thread.currentThread().interrupt();
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_INTERRUPTED);
+    } catch (ExecutionException e) {
+      LOG.warn("Failed to get job info.", e);
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_EXECUTIONERROR);
+    }
   }
 