commit ce818b621d4cde0c2dac85b9d9ab46ce1d65b3ca
Author: Sahil Takiar
Date:   Wed Oct 18 17:47:17 2017 -0700

    HIVE-17835: HS2 Logs print unnecessary stack trace when HoS query is cancelled

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index c6e17b5d20..62daaaa610 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -117,6 +117,7 @@ public int execute(DriverContext driverContext) {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_SUBMIT_JOB);
 
       if (driverContext.isShutdown()) {
+        LOG.warn("Killing Spark job");
         killJob();
         throw new HiveException("Operation is cancelled.");
       }
@@ -337,7 +338,7 @@ private void killJob() {
       try {
         jobRef.cancelJob();
       } catch (Exception e) {
-        LOG.warn("failed to kill job", e);
+        LOG.warn("Failed to kill Spark job", e);
       }
     }
   }
@@ -424,6 +425,7 @@ private void getSparkJobInfo(SparkJobStatus sparkJobStatus, int rc) {
     if ((error instanceof InterruptedException) ||
         (error instanceof HiveException &&
         error.getCause() instanceof InterruptedException)) {
+      LOG.info("Killing Spark job since query was interrupted");
       killJob();
     }
     HiveException he;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index 6c7aca7be8..4c4ce552bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -184,16 +184,19 @@ public int startMonitor() {
         }
       } catch (Exception e) {
         Exception finalException = e;
-        if (e instanceof InterruptedException) {
+        if (e instanceof InterruptedException ||
+            (e instanceof HiveException && e.getCause() instanceof InterruptedException)) {
           finalException = new HiveException(e, ErrorMsg.SPARK_JOB_INTERRUPTED);
+          LOG.warn("Interrupted while monitoring the Hive on Spark application, exiting");
+        } else {
+          String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
+          msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
+
+          // Has to use full name to make sure it does not conflict with
+          // org.apache.commons.lang.StringUtils
+          LOG.error(msg, e);
+          console.printError(msg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
         }
-        String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
-        msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
-
-        // Has to use full name to make sure it does not conflict with
-        // org.apache.commons.lang.StringUtils
-        LOG.error(msg, e);
-        console.printError(msg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
         rc = 1;
         done = true;
         sparkJobStatus.setError(finalException);
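
Note on the change in RemoteSparkJobMonitor: the behavioral core of this patch is that an InterruptedException, whether thrown bare or wrapped one level deep inside a HiveException, is now treated as a user cancellation and logged as a one-line warning, while all other exceptions keep the old full-stack-trace error path. The sketch below illustrates that detection pattern in isolation. It is a minimal, hypothetical example, not part of the patch: WrappedException stands in for Hive's HiveException, and the class and method names are invented for illustration.

// InterruptDetectionSketch.java -- self-contained illustration of the
// cancellation check introduced above. WrappedException is a hypothetical
// stand-in for org.apache.hadoop.hive.ql.metadata.HiveException.
public class InterruptDetectionSketch {

  // Stand-in for HiveException, which can carry a cause.
  static class WrappedException extends Exception {
    WrappedException(Throwable cause) {
      super(cause);
    }
  }

  // A query cancellation surfaces either as a bare InterruptedException or as
  // a wrapping exception whose direct cause is an InterruptedException.
  static boolean isCancellation(Exception e) {
    return e instanceof InterruptedException
        || (e instanceof WrappedException && e.getCause() instanceof InterruptedException);
  }

  public static void main(String[] args) {
    Exception bare = new InterruptedException("query cancelled");
    Exception wrapped = new WrappedException(new InterruptedException("query cancelled"));
    Exception failure = new RuntimeException("monitor failure");

    // Cancellations get a one-line warning; real failures keep the stack trace.
    System.out.println(isCancellation(bare));    // true
    System.out.println(isCancellation(wrapped)); // true
    System.out.println(isCancellation(failure)); // false
  }
}

As in the patched code, the check only inspects getCause() one level deep; an InterruptedException nested further down would still take the error path.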