diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index a35a215bfc..c1c0a60297 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -2393,7 +2393,11 @@ private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task t if(downstreamError != null) { //here we assume that upstream code may have parametrized the msg from ErrorMsg //so we want to keep it - errorMessage += ". " + downstreamError.getMessage(); + if (downstreamError.getMessage() != null) { + errorMessage += ". " + downstreamError.getMessage(); + } else { + errorMessage += ". " + org.apache.hadoop.util.StringUtils.stringifyException(downstreamError); + } } else { ErrorMsg em = ErrorMsg.getErrorMsg(exitVal); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index 207b66faba..a53ff5aff7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -300,6 +300,7 @@ public int execute(DriverContext driverContext) { Hive db = getHive(); return persistColumnStats(db); } catch (Exception e) { + setException(e); LOG.info("Failed to persist stats in metastore", e); } return 1; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java index 80d54bfd3a..1f9e9aa1b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java @@ -38,11 +38,13 @@ import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; public class ExplainSQRewriteTask extends Task implements Serializable { private static final long serialVersionUID = 1L; + private static final Logger LOG = LoggerFactory.getLogger(ExplainSQRewriteTask.class); @Override public StageType getType() { @@ -76,8 +78,8 @@ public int execute(DriverContext driverContext) { return (0); } catch (Exception e) { - console.printError("Failed with exception " + e.getMessage(), - "\n" + StringUtils.stringifyException(e)); + setException(e); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); return (1); } finally { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 0b307215d0..34da025b63 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -65,7 +65,6 @@ import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.AnnotationUtils; import org.json.JSONArray; import org.json.JSONException; @@ -383,8 +382,8 @@ public int execute(DriverContext driverContext) { return (0); } catch (Exception e) { - console.printError("Failed with exception " + e.getMessage(), - "\n" + StringUtils.stringifyException(e)); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); + setException(e); return (1); } finally { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MaterializedViewTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MaterializedViewTask.java index 834df84869..19aef6cafe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MaterializedViewTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MaterializedViewTask.java @@ -76,6 +76,7 @@ public int execute(DriverContext driverContext) { } } catch (HiveException
e) { LOG.debug("Exception during materialized view cache update", e); + setException(e); } return 0; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java index 1cad5796ff..de270cfcdb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java @@ -163,8 +163,8 @@ protected int execute(DriverContext driverContext) { } return 0; } catch (Exception e) { - console.printError("Failed with exception " + e.getMessage(), "\n" - + StringUtils.stringifyException(e)); + LOG.error(StringUtils.stringifyException(e)); + setException(e); return (1); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java index 00eb7de030..7a4242aa51 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java @@ -112,6 +112,7 @@ public int execute(DriverContext driverContext) { } } catch (Exception e) { LOG.error("Failed to run stats task", e); + setException(e); return 1; } return 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index 7ff8ddc6a0..1de782a756 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -465,9 +465,9 @@ public int execute(DriverContext driverContext) { jc.close(); } } catch (Exception e) { - LOG.warn("Failed while cleaning up ", e); + LOG.warn("Failed while cleaning up ", e); } finally { - HadoopJobExecHelper.runningJobs.remove(rj); + HadoopJobExecHelper.runningJobs.remove(rj); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java index 
434c3a881a..8f21f7c69e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java @@ -94,9 +94,8 @@ public int execute(DriverContext driverContext) { ctxCreated = true; } }catch (IOException e) { - e.printStackTrace(); - console.printError("Error launching map-reduce job", "\n" - + org.apache.hadoop.util.StringUtils.stringifyException(e)); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); + setException(e); return 5; } @@ -136,7 +135,8 @@ public int execute(DriverContext driverContext) { fs.mkdirs(tempOutPath); } } catch (IOException e) { - console.printError("Can't make path " + outputPath + " : " + e.getMessage()); + setException(e); + LOG.error("Can't make path " + outputPath, e); return 6; } @@ -191,19 +191,11 @@ public int execute(DriverContext driverContext) { success = (returnVal == 0); } catch (Exception e) { - e.printStackTrace(); - setException(e); - String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; - if (rj != null) { - mesg = "Ended Job = " + rj.getJobID() + mesg; - } else { - mesg = "Job Submission failed" + mesg; - } - + String mesg = rj != null ? 
("Ended Job = " + rj.getJobID()) : "Job Submission failed"; // Has to use full name to make sure it does not conflict with // org.apache.commons.lang.StringUtils - console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); + LOG.error(mesg, e); + setException(e); success = false; returnVal = 1; @@ -220,9 +212,9 @@ public int execute(DriverContext driverContext) { ColumnTruncateMapper.jobClose(outputPath, success, job, console, work.getDynPartCtx(), null); } catch (Exception e) { - LOG.warn("Failed while cleaning up ", e); + LOG.warn("Failed while cleaning up ", e); } finally { - HadoopJobExecHelper.runningJobs.remove(rj); + HadoopJobExecHelper.runningJobs.remove(rj); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecutionOverlayPlugin.java b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecutionOverlayPlugin.java index 950903c5c1..50803cc0a5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecutionOverlayPlugin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecutionOverlayPlugin.java @@ -42,7 +42,7 @@ public void run(HookContext hookContext) throws Exception { if (hookContext.getHookType() == HookType.ON_FAILURE_HOOK) { Throwable exception = hookContext.getException(); if (exception != null) { - if (exception.getMessage().contains("Vertex failed,")) { + if (exception.getMessage() != null && exception.getMessage().contains("Vertex failed,")) { retryPossible = true; } }