diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 5d6c9da..161fc25 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -58,7 +58,6 @@
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
-import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -122,7 +121,6 @@

   private static final long DEFAULT_MIN_SPLIT_SIZE = 16 * 1024 * 1024;
   private static final long DEFAULT_MAX_SPLIT_SIZE = 256 * 1024 * 1024;
-  private static final PerfLogger perfLogger = PerfLogger.getPerfLogger();
   private static final String CLASS_NAME = ReaderImpl.class.getName();

   /**
@@ -1048,9 +1046,7 @@ private static void cancelFutures(List<Future<?>> futures) {

   @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
     List<OrcSplit> result = generateSplitsInfo(job);
-    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
     return result.toArray(new InputSplit[result.size()]);
   }

diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
index b6ad0dc..1bb9fa8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
@@ -22,9 +22,7 @@
 import java.util.List;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -37,7 +35,6 @@
 /** An InputFormat for ORC files. Keys are meaningless,
  * value is the OrcStruct object */
 public class OrcNewInputFormat extends InputFormat<NullWritable, OrcStruct>{
-  private static final PerfLogger perfLogger = PerfLogger.getPerfLogger();
   private static final String CLASS_NAME = ReaderImpl.class.getName();

   @Override
@@ -117,7 +114,6 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
   @Override
   public List<InputSplit> getSplits(JobContext jobContext)
       throws IOException, InterruptedException {
-    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
     List<OrcSplit> splits = OrcInputFormat.generateSplitsInfo(ShimLoader.getHadoopShims()
         .getConfiguration(jobContext));
@@ -125,7 +121,6 @@ public boolean nextKeyValue() throws IOException, InterruptedException {

     for(OrcSplit split: splits) {
       result.add(new OrcNewSplit(split));
     }
-    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
     return result;
   }