Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java	(revision 1179378)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java	(working copy)
@@ -52,10 +52,8 @@
 import org.apache.hadoop.mapred.TaskReport;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.log4j.Appender;
-import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.LogManager;
-import org.apache.log4j.PropertyConfigurator;
 
 public class HadoopJobExecHelper {
 
@@ -393,53 +391,8 @@
     }
 
     MapRedStats mapRedStats = new MapRedStats(numMap, numReduce, cpuMsec,
         success, rj.getID().toString());
+    mapRedStats.setCounters(ctrs);
-    if (ctrs != null) {
-      Counter ctr;
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_SHUFFLE_BYTES");
-      if (ctr != null) {
-        mapRedStats.setReduceShuffleBytes(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "MAP_INPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setMapInputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "MAP_OUTPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setMapOutputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_INPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setReduceInputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_OUTPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setReduceOutputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("FileSystemCounters",
-          "HDFS_BYTES_READ");
-      if (ctr != null) {
-        mapRedStats.setHdfsRead(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("FileSystemCounters",
-          "HDFS_BYTES_WRITTEN");
-      if (ctr != null) {
-        mapRedStats.setHdfsWrite(ctr.getValue());
-      }
-    }
-
     this.task.setDone();
     // update based on the final value of the counters
     updateCounters(ctrs, rj);
Index: ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java	(revision 1179378)
+++ ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java	(working copy)
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql;
 
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.Counters.Counter;
+
 /**
  * MapRedStats.
 *
@@ -30,13 +33,7 @@
   int numMap;
   int numReduce;
   long cpuMSec;
-  long hdfsRead = -1;
-  long hdfsWrite = -1;
-  long mapInputRecords = -1;
-  long mapOutputRecords = -1;
-  long reduceInputRecords = -1;
-  long reduceOutputRecords = -1;
-  long reduceShuffleBytes = -1;
+  Counters counters = null;
   boolean success;
 
   String jobId;
@@ -73,62 +70,14 @@
     this.numReduce = numReduce;
   }
 
-  public long getHdfsRead() {
-    return hdfsRead;
+  public void setCounters(Counters taskCounters) {
+    this.counters = taskCounters;
   }
 
-  public void setHdfsRead(long hdfsRead) {
-    this.hdfsRead = hdfsRead;
+  public Counters getCounters() {
+    return this.counters;
   }
 
-  public long getHdfsWrite() {
-    return hdfsWrite;
-  }
-
-  public void setHdfsWrite(long hdfsWrite) {
-    this.hdfsWrite = hdfsWrite;
-  }
-
-  public long getMapInputRecords() {
-    return mapInputRecords;
-  }
-
-  public void setMapInputRecords(long mapInputRecords) {
-    this.mapInputRecords = mapInputRecords;
-  }
-
-  public long getMapOutputRecords() {
-    return mapOutputRecords;
-  }
-
-  public void setMapOutputRecords(long mapOutputRecords) {
-    this.mapOutputRecords = mapOutputRecords;
-  }
-
-  public long getReduceInputRecords() {
-    return reduceInputRecords;
-  }
-
-  public void setReduceInputRecords(long reduceInputRecords) {
-    this.reduceInputRecords = reduceInputRecords;
-  }
-
-  public long getReduceOutputRecords() {
-    return reduceOutputRecords;
-  }
-
-  public void setReduceOutputRecords(long reduceOutputRecords) {
-    this.reduceOutputRecords = reduceOutputRecords;
-  }
-
-  public long getReduceShuffleBytes() {
-    return reduceShuffleBytes;
-  }
-
-  public void setReduceShuffleBytes(long reduceShuffleBytes) {
-    this.reduceShuffleBytes = reduceShuffleBytes;
-  }
-
   public void setCpuMSec(long cpuMSec) {
     this.cpuMSec = cpuMSec;
   }
@@ -160,12 +109,20 @@
       sb.append(" Accumulative CPU: " + (cpuMSec / 1000D) + " sec ");
     }
 
-    if (hdfsRead >= 0) {
-      sb.append(" HDFS Read: " + hdfsRead);
-    }
+    if (counters != null) {
+      Counter hdfsReadCntr = counters.findCounter("FileSystemCounters",
+          "HDFS_BYTES_READ");
+      long hdfsRead;
+      if (hdfsReadCntr != null && (hdfsRead = hdfsReadCntr.getValue()) >= 0) {
+        sb.append(" HDFS Read: " + hdfsRead);
+      }
 
-    if (hdfsWrite >= 0) {
-      sb.append(" HDFS Write: " + hdfsWrite);
+      Counter hdfsWrittenCntr = counters.findCounter("FileSystemCounters",
+          "HDFS_BYTES_WRITTEN");
+      long hdfsWritten;
+      if (hdfsWrittenCntr != null && (hdfsWritten = hdfsWrittenCntr.getValue()) >= 0) {
+        sb.append(" HDFS Write: " + hdfsWritten);
+      }
     }
 
     sb.append(" " + (success ? "SUCESS" : "FAIL"));
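
Below is a minimal, hypothetical caller-side sketch (not part of the patch) of how the refactored API is intended to be used: with the per-metric getters removed (getHdfsRead(), getMapInputRecords(), and so on), consumers take the whole Counters object from MapRedStats.getCounters() and look up whichever counter they need. The class name MapRedStatsCounterExample, the job id string, and the counter values are made up; the MapRedStats constructor arguments follow the call shown in the HadoopJobExecHelper hunk above.

import org.apache.hadoop.hive.ql.MapRedStats;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Counters.Counter;

public class MapRedStatsCounterExample {
  public static void main(String[] args) {
    // Simulate the counters a finished MapReduce job might report.
    Counters ctrs = new Counters();
    ctrs.findCounter("FileSystemCounters", "HDFS_BYTES_READ").increment(1024L);
    ctrs.findCounter("FileSystemCounters", "HDFS_BYTES_WRITTEN").increment(512L);

    // After this patch, MapRedStats carries the whole Counters object
    // instead of a fixed set of long fields (hypothetical argument values).
    MapRedStats stats = new MapRedStats(2, 1, 3500L, true, "job_201110050000_0001");
    stats.setCounters(ctrs);

    // Callers that previously used getHdfsRead()/getHdfsWrite() now look the
    // counter up themselves, guarding against missing counters.
    Counters counters = stats.getCounters();
    if (counters != null) {
      Counter hdfsRead = counters.findCounter("FileSystemCounters", "HDFS_BYTES_READ");
      if (hdfsRead != null) {
        System.out.println("HDFS Read: " + hdfsRead.getValue());
      }
    }
  }
}

The null checks mirror the defensive style of MapRedStats.toString() in the patch, since getCounters() can legitimately return null when a job finishes without reporting counters.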