diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index 3651c9c..7f11104 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.hooks;
 
-import java.io.Serializable;
 import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -118,6 +117,7 @@ public void run(final HookContext hookContext) throws Exception {
     final long currentTime = System.currentTimeMillis();
     final HiveConf conf = new HiveConf(hookContext.getConf());
     final QueryState queryState = hookContext.getQueryState();
+    final Map<String, Long> durations = hookContext.getDurations();
 
     executor.submit(new Runnable() {
         @Override
@@ -175,10 +175,10 @@ public void run() {
                   tablesRead, tablesWritten, conf));
               break;
             case POST_EXEC_HOOK:
-              fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, hookContext.getPerfLogger()));
+              fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, durations));
               break;
             case ON_FAILURE_HOOK:
-              fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser , false, opId, hookContext.getPerfLogger()));
+              fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser , false, opId, durations));
               break;
             default:
               //ignore
@@ -290,7 +290,7 @@ TimelineEntity createPreHookEvent(String queryId, String query, JSONObject expla
   }
 
   TimelineEntity createPostHookEvent(String queryId, long stopTime, String user, String requestuser, boolean success,
-      String opId, PerfLogger perfLogger) throws Exception {
+      String opId, Map<String, Long> durations) throws Exception {
     LOG.info("Received post-hook notification for :" + queryId);
 
     TimelineEntity atsEntity = new TimelineEntity();
@@ -311,8 +311,8 @@ TimelineEntity createPostHookEvent(String queryId, long stopTime, String user, S
 
     // Perf times
     JSONObject perfObj = new JSONObject(new LinkedHashMap<>());
-    for (String key : perfLogger.getEndTimes().keySet()) {
-      perfObj.put(key, perfLogger.getDuration(key));
+    for (Map.Entry<String, Long> entry : durations.entrySet()) {
+      perfObj.put(entry.getKey(), entry.getValue());
     }
     atsEntity.addOtherInfo(OtherInfoTypes.PERF.name(), perfObj.toString());
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
index c94100c..0bc5093 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hive.ql.hooks;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -66,7 +67,7 @@
   private final String sessionId;
   private final String threadId;
   private boolean isHiveServerQuery;
-  private PerfLogger perfLogger;
+  private Map<String, Long> durations;
 
   public HookContext(QueryPlan queryPlan, QueryState queryState,
       Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress, String hiveInstanceAddress,
@@ -92,7 +93,10 @@ public HookContext(QueryPlan queryPlan, QueryState queryState,
     this.sessionId = sessionId;
     this.threadId = threadId;
     this.isHiveServerQuery = isHiveServerQuery;
-    this.perfLogger = perfLogger;
+    this.durations = new HashMap<String, Long>();
+    for (String key : perfLogger.getEndTimes().keySet()) {
+      this.durations.put(key, perfLogger.getDuration(key));
+    }
   }
 
   public QueryPlan getQueryPlan() {
@@ -231,12 +235,12 @@ public void setHiveServerQuery(boolean isHiveServerQuery) {
     this.isHiveServerQuery = isHiveServerQuery;
   }
 
-  public PerfLogger getPerfLogger() {
-    return perfLogger;
+  public Map<String, Long> getDurations() {
+    return durations;
  }
 
-  public void setPerfLogger(PerfLogger perfLogger) {
-    this.perfLogger = perfLogger;
+  public void setDurations(Map<String, Long> durations) {
+    this.durations = durations;
   }
 }
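Minimal sketch of a downstream consumer of the new accessor, assuming only what the patch itself introduces (`HookContext#getDurations()` returning a `Map<String, Long>`) plus Hive's existing `ExecuteWithHookContext` interface. The class name and package are hypothetical, not part of this change.

```java
package org.apache.hadoop.hive.ql.hooks.example;

import java.util.Map;

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;

// Hypothetical post-execution hook: reads the durations snapshot taken when
// HookContext was constructed, rather than holding a PerfLogger reference.
public class DurationLoggingHook implements ExecuteWithHookContext {

  @Override
  public void run(HookContext hookContext) throws Exception {
    // Iterate the per-phase durations (milliseconds) captured for this query.
    for (Map.Entry<String, Long> entry : hookContext.getDurations().entrySet()) {
      System.out.println(entry.getKey() + ": " + entry.getValue() + " ms");
    }
  }
}
```

Such a hook would typically be registered through `hive.exec.post.hooks`. Because the durations are copied out of the PerfLogger when the HookContext is built, a consumer like the ATSHook can read them later on its async executor thread without touching the PerfLogger itself.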