diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 1943c6d84e..837ad31789 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -2225,8 +2225,7 @@ private TaskRunner launchTask(Task tsk, String queryId,
       console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs);
     }
     tsk.initialize(queryState, plan, cxt, ctx.getOpContext());
-    TaskResult tskRes = new TaskResult();
-    TaskRunner tskRun = new TaskRunner(tsk, tskRes);
+    TaskRunner tskRun = new TaskRunner(tsk);
 
     cxt.launching(tskRun);
     // Launch Task
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 6193b900e0..7af45af7ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -25,13 +25,13 @@
 import org.apache.hadoop.hive.ql.QueryDisplay;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.history.HiveHistory;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
@@ -194,17 +194,16 @@ protected Hive getHive() {
    *
    * @return return value of execute()
    */
-  public int executeTask() {
+  public int executeTask(HiveHistory hiveHistory) {
     try {
-      SessionState ss = SessionState.get();
       this.setStarted();
-      if (ss != null) {
-        ss.getHiveHistory().logPlanProgress(queryPlan);
+      if (hiveHistory != null) {
+        hiveHistory.logPlanProgress(queryPlan);
       }
       int retval = execute(driverContext);
       this.setDone();
-      if (ss != null) {
-        ss.getHiveHistory().logPlanProgress(queryPlan);
+      if (hiveHistory != null) {
+        hiveHistory.logPlanProgress(queryPlan);
       }
       return retval;
     } catch (IOException e) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
index eddc31e1f0..3c988dde32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
@@ -46,9 +46,9 @@ protected Long initialValue() {
 
   private static transient final Logger LOG = LoggerFactory.getLogger(TaskRunner.class);
 
-  public TaskRunner(Task tsk, TaskResult result) {
+  public TaskRunner(Task tsk) {
     this.tsk = tsk;
-    this.result = result;
+    this.result = new TaskResult();
     ss = SessionState.get();
   }
 
@@ -94,7 +94,7 @@ public void run() {
   public void runSequential() {
     int exitVal = -101;
     try {
-      exitVal = tsk.executeTask();
+      exitVal = tsk.executeTask(ss == null ? null : ss.getHiveHistory());
     } catch (Throwable t) {
       if (tsk.getException() == null) {
         tsk.setException(t);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index c23d202383..83d87f9b6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -308,12 +308,12 @@ public void progressTask(String queryId, Task task) {
       new ThreadLocal<Map<String, String>>() {
     @Override
     protected Map<String, String> initialValue() {
-      return new HashMap();
+      return new HashMap<>();
     }
   };
 
   @Override
-  public void logPlanProgress(QueryPlan plan) throws IOException {
+  public synchronized void logPlanProgress(QueryPlan plan) throws IOException {
     if (plan != null) {
       Map ctrmap = ctrMapFactory.get();
       ctrmap.put("plan", plan.toString());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index deba1d5a01..d00df78365 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -57,7 +57,7 @@ private void analyze(ASTNode ast) throws Exception {
     List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
     Assert.assertEquals(1, rootTasks.size());
     for(Task task : rootTasks) {
-      Assert.assertEquals(0, task.executeTask());
+      Assert.assertEquals(0, task.executeTask(null));
     }
   }
   @Test
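
For reference, a minimal caller-side sketch of the new contract (not part of the patch): executeTask() no longer looks up SessionState internally, so callers must hand it the HiveHistory explicitly, passing null when no session-scoped history exists, mirroring what TaskRunner.runSequential() and the updated test now do. The variable names below (tsk, ss, history) are illustrative only.

    // Sketch of the call pattern introduced by this change, assuming 'tsk' is a
    // Task obtained elsewhere and the calling thread may or may not have a session.
    SessionState ss = SessionState.get();
    HiveHistory history = (ss == null) ? null : ss.getHiveHistory();
    int exitVal = tsk.executeTask(history);   // previously: tsk.executeTask();

Pushing the SessionState lookup out of Task.executeTask() makes the history dependency explicit and lets tests such as TestMacroSemanticAnalyzer run tasks with a null history instead of a full session.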