Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java	(revision 1164320)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java	(working copy)
@@ -23,12 +23,12 @@
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Collections;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Enumeration;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.MapRedStats;
@@ -47,10 +47,8 @@
 import org.apache.hadoop.mapred.TaskReport;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.log4j.Appender;
-import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.LogManager;
-import org.apache.log4j.PropertyConfigurator;
 
 public class HadoopJobExecHelper {
 
@@ -394,6 +392,12 @@
       mapRedStats.setMapOutputRecords(ctr.getValue());
     }
 
+    ctr = ctrs.findCounter("org.apache.hadoop.mapred.JobInProgress$Counter",
+        "SLOTS_MILLIS_MAPS");
+    if (ctr != null) {
+      mapRedStats.setMapSlotsMillis(ctr.getValue());
+    }
+
     ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
         "REDUCE_INPUT_RECORDS");
     if (ctr != null) {
@@ -406,6 +410,12 @@
       mapRedStats.setReduceOutputRecords(ctr.getValue());
     }
 
+    ctr = ctrs.findCounter("org.apache.hadoop.mapred.JobInProgress$Counter",
+        "SLOTS_MILLIS_REDUCES");
+    if (ctr != null) {
+      mapRedStats.setReduceSlotsMillis(ctr.getValue());
+    }
+
     ctr = ctrs.findCounter("FileSystemCounters",
         "HDFS_BYTES_READ");
     if (ctr != null) {
Index: ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java	(revision 1164320)
+++ ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java	(working copy)
@@ -34,8 +34,10 @@
   long hdfsWrite = -1;
   long mapInputRecords = -1;
   long mapOutputRecords = -1;
+  long mapSlotsMillis = -1;
   long reduceInputRecords = -1;
   long reduceOutputRecords = -1;
+  long reduceSlotsMillis = -1;
   long reduceShuffleBytes = -1;
   boolean success;
 
@@ -102,6 +104,14 @@
     this.mapOutputRecords = mapOutputRecords;
   }
 
+  public long getMapSlotsMillis() {
+    return mapSlotsMillis;
+  }
+
+  public void setMapSlotsMillis(long mapSlotsMillis) {
+    this.mapSlotsMillis = mapSlotsMillis;
+  }
+
   public long getReduceInputRecords() {
     return reduceInputRecords;
   }
@@ -118,6 +128,14 @@
     this.reduceOutputRecords = reduceOutputRecords;
   }
 
+  public long getReduceSlotsMillis() {
+    return reduceSlotsMillis;
+  }
+
+  public void setReduceSlotsMillis(long reduceSlotsMillis) {
+    this.reduceSlotsMillis = reduceSlotsMillis;
+  }
+
   public long getReduceShuffleBytes() {
     return reduceShuffleBytes;
   }
@@ -149,6 +167,14 @@
       sb.append(" Accumulative CPU: " + (cpuMSec / 1000D) + " sec ");
     }
 
+    if (mapSlotsMillis > 0) {
+      sb.append(" Map time: " + (mapSlotsMillis / 1000D) + " sec ");
+    }
+
+    if (reduceSlotsMillis > 0) {
+      sb.append(" Reduce time: " + (reduceSlotsMillis / 1000D) + " sec ");
+    }
+
     if (hdfsRead >= 0) {
       sb.append(" HDFS Read: " + hdfsRead);
     }
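
The patch's counter-lookup pattern can be exercised outside of Hive as well. Below is a minimal sketch, assuming the old org.apache.hadoop.mapred client API that HadoopJobExecHelper itself is built on; the SlotMillisDemo class name and the job-ID command-line argument are illustrative and not part of the patch.

// Minimal standalone sketch of the group/name counter lookup used by the
// patch. Assumes a reachable JobTracker configured in the local JobConf;
// args[0] is a hypothetical job ID such as job_201109120000_0001.
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;

public class SlotMillisDemo {
  public static void main(String[] args) throws Exception {
    JobClient jc = new JobClient(new JobConf());
    RunningJob rj = jc.getJob(JobID.forName(args[0]));
    if (rj == null) {
      System.err.println("Unknown job: " + args[0]);
      return;
    }
    Counters ctrs = rj.getCounters();

    // Same group/name strings the patch passes to findCounter(). The
    // defensive null checks mirror HadoopJobExecHelper's style, since the
    // counters may be missing on clusters that do not report slot usage.
    Counters.Counter ctr = ctrs.findCounter(
        "org.apache.hadoop.mapred.JobInProgress$Counter", "SLOTS_MILLIS_MAPS");
    if (ctr != null) {
      System.out.println("Map time: " + (ctr.getValue() / 1000D) + " sec");
    }

    ctr = ctrs.findCounter(
        "org.apache.hadoop.mapred.JobInProgress$Counter", "SLOTS_MILLIS_REDUCES");
    if (ctr != null) {
      System.out.println("Reduce time: " + (ctr.getValue() / 1000D) + " sec");
    }
  }
}

Note the design choice the patch makes: the counters are resolved by group and name strings rather than through an enum reference, presumably because JobInProgress is not part of Hadoop's public API; this also matches how the surrounding code already fetches MAP_OUTPUT_RECORDS and the FileSystemCounters values.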