Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (revision 1178612) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (working copy) @@ -52,10 +52,8 @@ import org.apache.hadoop.mapred.TaskReport; import org.apache.hadoop.mapred.Counters.Counter; import org.apache.log4j.Appender; -import org.apache.log4j.BasicConfigurator; import org.apache.log4j.FileAppender; import org.apache.log4j.LogManager; -import org.apache.log4j.PropertyConfigurator; public class HadoopJobExecHelper { @@ -389,51 +387,8 @@ } MapRedStats mapRedStats = new MapRedStats(numMap, numReduce, cpuMsec, success, rj.getID().toString()); + mapRedStats.setCounters(ctrs); - Counter ctr; - - ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter", - "REDUCE_SHUFFLE_BYTES"); - if (ctr != null) { - mapRedStats.setReduceShuffleBytes(ctr.getValue()); - } - - ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter", - "MAP_INPUT_RECORDS"); - if (ctr != null) { - mapRedStats.setMapInputRecords(ctr.getValue()); - } - - ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter", - "MAP_OUTPUT_RECORDS"); - if (ctr != null) { - mapRedStats.setMapOutputRecords(ctr.getValue()); - } - - ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter", - "REDUCE_INPUT_RECORDS"); - if (ctr != null) { - mapRedStats.setReduceInputRecords(ctr.getValue()); - } - - ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter", - "REDUCE_OUTPUT_RECORDS"); - if (ctr != null) { - mapRedStats.setReduceOutputRecords(ctr.getValue()); - } - - ctr = ctrs.findCounter("FileSystemCounters", - "HDFS_BYTES_READ"); - if (ctr != null) { - mapRedStats.setHdfsRead(ctr.getValue()); - } - - ctr = ctrs.findCounter("FileSystemCounters", - "HDFS_BYTES_WRITTEN"); - if (ctr != null) { - mapRedStats.setHdfsWrite(ctr.getValue()); - } - 
this.task.setDone(); // update based on the final value of the counters updateCounters(ctrs, rj); Index: ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java (revision 1178612) +++ ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java (working copy) @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql; +import org.apache.hadoop.mapred.Counters; + /** * MapRedStats. * @@ -30,13 +32,7 @@ int numMap; int numReduce; long cpuMSec; - long hdfsRead = -1; - long hdfsWrite = -1; - long mapInputRecords = -1; - long mapOutputRecords = -1; - long reduceInputRecords = -1; - long reduceOutputRecords = -1; - long reduceShuffleBytes = -1; + Counters counters = null; boolean success; String jobId; @@ -73,62 +69,14 @@ this.numReduce = numReduce; } - public long getHdfsRead() { - return hdfsRead; + public void setCounters(Counters taskCounters) { + this.counters = taskCounters; } - public void setHdfsRead(long hdfsRead) { - this.hdfsRead = hdfsRead; + public Counters getCounters() { + return this.counters; } - public long getHdfsWrite() { - return hdfsWrite; - } - - public void setHdfsWrite(long hdfsWrite) { - this.hdfsWrite = hdfsWrite; - } - - public long getMapInputRecords() { - return mapInputRecords; - } - - public void setMapInputRecords(long mapInputRecords) { - this.mapInputRecords = mapInputRecords; - } - - public long getMapOutputRecords() { - return mapOutputRecords; - } - - public void setMapOutputRecords(long mapOutputRecords) { - this.mapOutputRecords = mapOutputRecords; - } - - public long getReduceInputRecords() { - return reduceInputRecords; - } - - public void setReduceInputRecords(long reduceInputRecords) { - this.reduceInputRecords = reduceInputRecords; - } - - public long getReduceOutputRecords() { - return reduceOutputRecords; - } - - public void setReduceOutputRecords(long reduceOutputRecords) { - this.reduceOutputRecords = 
reduceOutputRecords; - } - - public long getReduceShuffleBytes() { - return reduceShuffleBytes; - } - - public void setReduceShuffleBytes(long reduceShuffleBytes) { - this.reduceShuffleBytes = reduceShuffleBytes; - } - public void setCpuMSec(long cpuMSec) { this.cpuMSec = cpuMSec; } @@ -160,10 +108,14 @@ sb.append(" Accumulative CPU: " + (cpuMSec / 1000D) + " sec "); } + long hdfsRead = (counters == null) ? -1 : counters.findCounter("FileSystemCounters", + "HDFS_BYTES_READ").getValue(); if (hdfsRead >= 0) { sb.append(" HDFS Read: " + hdfsRead); } + long hdfsWrite = (counters == null) ? -1 : counters.findCounter("FileSystemCounters", + "HDFS_BYTES_WRITTEN").getValue(); if (hdfsWrite >= 0) { sb.append(" HDFS Write: " + hdfsWrite); }