Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 1082244) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy) @@ -100,8 +100,8 @@ import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; +import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes; import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes; import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.ql.stats.StatsPublisher; import org.apache.hadoop.hive.serde.Constants; @@ -110,8 +110,8 @@ import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.mapred.FileOutputFormat; @@ -1757,4 +1757,40 @@ return false; } } + + private static ThreadLocal<HashMap<String, Long>> perfKeyMaps = new ThreadLocal<HashMap<String, Long>>(); + + /** + * Call this function when you start to measure time spent by a piece of code. + * @param _log the logging object to be used. + * @param method method or ID that identifies this perf log element. + */ + public static void PerfLogBegin(Log _log, String method) { + long startTime = System.currentTimeMillis(); + _log.info("<PERFLOG method=" + method + ">"); + if (perfKeyMaps.get() == null) { + perfKeyMaps.set(new HashMap<String, Long>()); + } + perfKeyMaps.get().put(method, new Long(startTime)); + } + + /** + * Call this function in correspondence of PerfLogBegin to mark the end of the measurement.
+ * @param _log + * @param method + */ + public static void PerfLogEnd(Log _log, String method) { + Long startTime = perfKeyMaps.get().get(method); + long endTime = System.currentTimeMillis(); + StringBuilder sb = new StringBuilder("</PERFLOG method=").append(method); + if (startTime != null) { + sb.append(" start=").append(startTime); + } + sb.append(" end=").append(endTime); + if (startTime != null) { + sb.append(" duration=").append(endTime - startTime.longValue()); + } + sb.append(">"); + _log.info(sb); + } } Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1082244) +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy) @@ -313,6 +313,9 @@ * The SQL query to compile. */ public int compile(String command) { + + Utilities.PerfLogBegin(LOG, "compile"); + if (plan != null) { close(); plan = null; } @@ -391,11 +394,14 @@ if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) { try { + Utilities.PerfLogBegin(LOG, "doAuthorization"); doAuthorization(sem); } catch (AuthorizationException authExp) { console.printError("Authorization failed:" + authExp.getMessage() + ". Use show grant to get more details."); return 403; + } finally { + Utilities.PerfLogEnd(LOG, "doAuthorization"); } } @@ -418,6 +424,8 @@ console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (12); + } finally { + Utilities.PerfLogEnd(LOG, "compile"); } } @@ -666,6 +674,9 @@ * sure that the locks are lexicographically sorted.
**/ public int acquireReadWriteLocks() { + + Utilities.PerfLogBegin(LOG, "acquireReadWriteLocks"); + try { int sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000; int numRetries = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES); @@ -764,6 +775,8 @@ console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (10); + } finally { + Utilities.PerfLogEnd(LOG, "acquireReadWriteLocks"); } } @@ -787,10 +800,14 @@ * locks have already been released, ignore them **/ private void releaseLocks(List<HiveLock> hiveLocks) { + Utilities.PerfLogBegin(LOG, "releaseLocks"); + if (hiveLocks != null) { ctx.getHiveLockMgr().releaseLocks(hiveLocks); } ctx.setHiveLocks(null); + + Utilities.PerfLogEnd(LOG, "releaseLocks"); } public CommandProcessorResponse run(String command) { @@ -798,6 +815,7 @@ SQLState = null; int ret = compile(command); + if (ret != 0) { releaseLocks(ctx.getHiveLocks()); return new CommandProcessorResponse(ret, errorMessage, SQLState); } @@ -893,6 +911,8 @@ } public int execute() { + Utilities.PerfLogBegin(LOG, "Driver.execute"); + boolean noName = StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HADOOPJOBNAME)); int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH); @@ -919,10 +939,18 @@ for (Hook peh : getPreExecHooks()) { if (peh instanceof ExecuteWithHookContext) { + Utilities.PerfLogBegin(LOG, "PreHook." + peh.getClass().getSimpleName()); + ((ExecuteWithHookContext) peh).run(hookContext); + + Utilities.PerfLogEnd(LOG, "PreHook." + peh.getClass().getSimpleName()); } else if (peh instanceof PreExecute) { + Utilities.PerfLogBegin(LOG, "PreHook." + peh.getClass().getSimpleName()); + ((PreExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(), ShimLoader.getHadoopShims().getUGIForConf(conf)); + + Utilities.PerfLogEnd(LOG, "PreHook." + peh.getClass().getSimpleName()); } } @@ -1039,11 +1067,19 @@ // Get all the post execution hooks and execute them.
for (Hook peh : getPostExecHooks()) { if (peh instanceof ExecuteWithHookContext) { + Utilities.PerfLogBegin(LOG, "PostHook." + peh.getClass().getSimpleName()); + ((ExecuteWithHookContext) peh).run(hookContext); + + Utilities.PerfLogEnd(LOG, "PostHook." + peh.getClass().getSimpleName()); } else if (peh instanceof PostExecute) { + Utilities.PerfLogBegin(LOG, "PostHook." + peh.getClass().getSimpleName()); + ((PostExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(), (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo() : null), ShimLoader.getHadoopShims().getUGIForConf(conf)); + + Utilities.PerfLogEnd(LOG, "PostHook." + peh.getClass().getSimpleName()); } } @@ -1071,6 +1107,7 @@ if (noName) { conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, ""); } + Utilities.PerfLogEnd(LOG, "Driver.execute"); } plan.setDone();