Call tree (all threads together)

Name | Time (ms) | % | Avg. Time (ms) | Own Time (ms) | Invocation Count

<All threads> | 207,625 | 100 %
 org.apache.hadoop.mapred.YarnChild.main(String[]) | 207,625 | 100 % | 207,625 | 0 | 1
  org.apache.hadoop.security.UserGroupInformation.doAs(PrivilegedExceptionAction) | 207,625 | 100 % | 103,812 | 0 | 2
   javax.security.auth.Subject.doAs(Subject, PrivilegedExceptionAction) | 207,624 | 99 % | 103,812 | 0 | 2
    org.apache.hadoop.mapred.YarnChild$2.run() | 207,624 | 99 % | 207,624 | 0 | 1
     org.apache.hadoop.mapred.MapTask.run(JobConf, TaskUmbilicalProtocol) | 207,624 | 99 % | 207,624 | 0 | 1
      org.apache.hadoop.mapred.MapTask.runOldMapper(JobConf, JobSplit$TaskSplitIndex, TaskUmbilicalProtocol, Task$TaskReporter) | 207,624 | 99 % | 207,624 | 1 | 1
       org.apache.hadoop.mapred.MapRunner.run(RecordReader, OutputCollector, Reporter) | 207,623 | 99 % | 207,623 | 0 | 1
        org.apache.hadoop.mapred.MapTask$TrackedRecordReader.next(Object, Object) | 207,623 | 99 % | 1,870 | 0 | 111
         org.apache.hadoop.mapred.MapTask$TrackedRecordReader.moveToNext(Object, Object) | 207,622 | 99 % | 1,870 | 1 | 111
          org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.next(Object, Object) | 207,621 | 99 % | 1,870 | 0 | 111
           org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.initNextRecordReader(Object) | 207,621 | 99 % | 41,524 | 0 | 5
            java.lang.reflect.Constructor.newInstance(Object[]) | 207,621 | 99 % | 51,905 | 0 | 4
             sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Object[]) | 207,621 | 99 % | 51,905 | 0 | 4
              sun.reflect.NativeConstructorAccessorImpl.newInstance(Object[]) | 207,621 | 99 % | 51,905 | 0 | 4
               org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.<init>(InputSplit, Configuration, Reporter, Integer) | 207,621 | 99 % | 51,905 | 0 | 4
                org.apache.hadoop.hive.ql.io.RCFileInputFormat.getRecordReader(InputSplit, JobConf, Reporter) | 207,621 | 99 % | 51,905 | 0 | 4
                 org.apache.hadoop.hive.ql.io.RCFileRecordReader.<init>(Configuration, FileSplit) | 207,620 | 99 % | 51,905 | 0 | 4
                  org.apache.hadoop.hive.ql.io.RCFile$Reader.sync(long) | 207,282 | 99 % | 69,094 | 16,923 | 3
                   java.io.DataInputStream.readByte() | 188,757 | 91 % | 0 | 0 | 8,524,938
                    org.apache.hadoop.hdfs.DFSInputStream.read() | 188,757 | 91 % | 0 | 0 | 8,524,938
                     org.apache.hadoop.hdfs.DFSInputStream.read(byte[], int, int) | 188,757 | 91 % | 0 | 11,415 | 8,524,938
                      org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream$ReaderStrategy, int, int) | 177,276 | 85 % | 0 | 63,314 | 8,524,938
                       org.apache.hadoop.hdfs.DFSInputStream.readBuffer(DFSInputStream$ReaderStrategy, int, int, Map) | 99,284 | 48 % | 0 | 0 | 8,524,938
                        org.apache.hadoop.hdfs.DFSInputStream$ByteArrayStrategy.doRead(BlockReader, int, int) | 99,284 | 48 % | 0 | 0 | 8,524,938
                         org.apache.hadoop.hdfs.BlockReaderLocal.read(byte[], int, int) | 99,284 | 48 % | 0 | 19,729 | 8,524,938
                          java.nio.DirectByteBuffer.get(byte[], int, int) | 35,125 | 17 % | 0 | 0 | 8,524,938
                           java.nio.ByteBuffer.get(byte[], int, int) | 35,125 | 17 % | 0 | 25,680 | 8,524,938
                            java.nio.DirectByteBuffer.get() | 9,304 | 4 % | 0 | 6,576 | 8,524,938
                            java.nio.Buffer.checkBounds(int, int, int) | 72 | 0 % | 0 | 72 | 8,524,938
                            java.nio.Buffer.remaining() | 68 | 0 % | 0 | 68 | 8,524,938
                          org.apache.commons.logging.impl.Log4JLogger.isTraceEnabled() | 34,366 | 17 % | 0 | 6,753 | 8,524,938
                          org.apache.hadoop.hdfs.BlockReaderLocal.fillSlowReadBuffer(int) | 10,063 | 5 % | 0 | 6,941 | 8,524,938
                       org.apache.hadoop.fs.FileSystem$Statistics.incrementBytesRead(long) | 4,380 | 2 % | 0 | 2,911 | 8,524,938
                       java.util.HashMap.<init>() | 3,120 | 2 % | 0 | 3,050 | 8,524,938
                       org.apache.hadoop.hdfs.DFSInputStream.reportCheckSumFailure(Map, int) | 3,091 | 1 % | 0 | 3,020 | 8,524,938
                       org.apache.hadoop.hdfs.DFSInputStream.getFileLength() | 1,363 | 1 % | 0 | 1,363 | 8,524,938
                       java.lang.Math.min(long, long) | 1,362 | 1 % | 0 | 1,362 | 8,524,938
                       org.apache.hadoop.hdfs.DFSClient.checkOpen() | 1,360 | 1 % | 0 | 1,360 | 8,524,938
                      org.apache.hadoop.hdfs.DFSInputStream$ByteArrayStrategy.<init>(byte[]) | 64 | 0 % | 0 | 64 | 8,524,938
                   org.apache.hadoop.fs.FSDataInputStream.getPos() | 1,383 | 1 % | 0 | 1,383 | 8,524,944
                   java.io.DataInputStream.readFully(byte[]) | 218 | 0 % | 72 | 0 | 3
                  org.apache.hadoop.hive.ql.io.RCFile$Reader.<init>(FileSystem, Path, Configuration) | 334 | 0 % | 83 | 0 | 4
                   org.apache.hadoop.fs.Path.getFileSystem(Configuration) | 3 | 0 % | 0 | 0 | 4
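
The hotspot is unambiguous: about 91 % of the 207.6 s map task sits under org.apache.hadoop.hive.ql.io.RCFile$Reader.sync(long), which locates the next 16-byte RCFile sync marker by issuing 8,524,938 single-byte DataInputStream.readByte() calls, each bottoming out in a full DFSInputStream.read() call. The cost is per-call overhead rather than I/O: note the 34 s spent just in Log4JLogger.isTraceEnabled() and 25 s in ByteBuffer.get() along that path. The Java sketch below illustrates the byte-at-a-time scan pattern that produces this profile shape and how a buffered wrapper amortizes the per-call cost. It is a minimal reconstruction of the access pattern, not the actual Hive source; the class and method names (SyncScanSketch, scanForSync) are illustrative.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;

public class SyncScanSketch {
    static final int SYNC_SIZE = 16; // RCFile sync markers are 16 bytes, as in SequenceFile

    // Byte-at-a-time scan: one stream call per scanned byte, matching the
    // 8.5M readByte() -> DFSInputStream.read() invocations in the call tree.
    // Returns the offset of the marker, or -1 at EOF.
    static long scanForSync(DataInputStream in, byte[] sync) throws IOException {
        byte[] window = new byte[SYNC_SIZE];
        in.readFully(window);                          // prime the rolling window
        for (long pos = 0; ; pos++) {
            int j = 0;
            for (; j < SYNC_SIZE; j++) {
                if (sync[j] != window[(int) ((pos + j) % SYNC_SIZE)]) break;
            }
            if (j == SYNC_SIZE) return pos;            // marker found at offset pos
            int b = in.read();                         // <-- one stream call per byte
            if (b < 0) return -1;
            window[(int) (pos % SYNC_SIZE)] = (byte) b;
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] sync = new byte[SYNC_SIZE];
        Arrays.fill(sync, (byte) 0x5A);
        byte[] data = new byte[1 << 20];               // 1 MiB of zeroes...
        System.arraycopy(sync, 0, data, 700_000, SYNC_SIZE); // ...with one marker

        // Wrapping the raw stream in a BufferedInputStream turns the same scan
        // into ~8 KiB bulk reads, paying the per-call overhead once per chunk.
        DataInputStream in = new DataInputStream(
                new BufferedInputStream(new ByteArrayInputStream(data), 8192));
        System.out.println("sync marker at offset " + scanForSync(in, sync));
    }
}

With an 8 KiB buffer the scan makes roughly 1/8000th as many calls into the underlying stream. Assuming the same applies to the profiled job, the remedy is to let sync() read through a buffered wrapper (or read the file in chunks and search the chunk in memory) instead of paying checkOpen/statistics/trace-logging overhead on every single byte.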