Call tree (by thread)

 Name  Time (ms)  Avg. Time (ms)  Own Time (ms)  Invocation Count 
 main native ID: 0x6DC2 group: 'main'
798,926  100 %
 org.apache.hadoop.mapred.YarnChild.main(String[])
798,926  100 %  798,926  8  1
 org.apache.hadoop.security.UserGroupInformation.doAs(PrivilegedExceptionAction)
778,26397 %389,13102
 javax.security.auth.Subject.doAs(Subject, PrivilegedExceptionAction)
778,26397 %389,13102
 org.apache.hadoop.mapred.YarnChild$2.run()
777,80197 %777,80101
 org.apache.hadoop.mapred.MapTask.run(JobConf, TaskUmbilicalProtocol)
776,20797 %776,20701
 org.apache.hadoop.mapred.MapTask.runOldMapper(JobConf, JobSplit$TaskSplitIndex, TaskUmbilicalProtocol, Task$TaskReporter)
775,16597 %775,16501
 org.apache.hadoop.mapred.MapRunner.run(RecordReader, OutputCollector, Reporter)
747,57594 %747,5754,0091
 org.apache.hadoop.hive.ql.exec.ExecMapper.map(Object, Object, OutputCollector, Reporter)
657,33582 %02,1151,093,647
 org.apache.hadoop.hive.ql.exec.MapOperator.process(Writable)
654,32982 %6,2310105
 org.apache.hadoop.hive.ql.exec.Operator.forward(Object, ObjectInspector)
654,25482 %6,2300105
 org.apache.hadoop.hive.ql.exec.Operator.process(Object, int)
654,24082 %6,2900104
 org.apache.hadoop.hive.ql.exec.TableScanOperator.processOp(Object, int)
654,24082 %6,2900104
 org.apache.hadoop.hive.ql.exec.Operator.forward(Object, ObjectInspector)
654,24082 %6,2900104
 org.apache.hadoop.hive.ql.exec.Operator.process(Object, int)
654,23982 %6,3510103
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator.processOp(Object, int)
654,23982 %6,3510103
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator.setUpFetchContexts(String, SMBMapJoinOperator$MergeQueue)
650,40781 %650,40701
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator$MergeQueue.setupContext(List)
649,35381 %649,35341
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator$MergeQueue.nextHive(Integer)
644,59781 %2,4690261
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator$MergeQueue.next(Integer)
644,59781 %2,4693261
 org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow()
644,54781 %2,4691261
 org.apache.hadoop.hive.ql.exec.FetchOperator.getRecordReader()
643,03080 %2,46336261
 org.apache.hadoop.hive.ql.exec.FetchOperator.getNextPath()
625,60378 %2,39610261
 org.apache.hadoop.hive.ql.exec.FetchOperator.listStatusUnderPath(FileSystem, Path)
623,59778 %2,3898261
 org.apache.hadoop.hive.conf.HiveConf.<init>(Configuration, Class)
622,05278 %2,3831261
 org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(Path)
1,4960 %53261
 org.apache.hadoop.hive.conf.HiveConf.getBoolVar(HiveConf$ConfVars)
410 %00261
 org.apache.hadoop.fs.FileSystem.exists(Path)
1,6580 %60261
 org.apache.hadoop.fs.Path.getFileSystem(Configuration)
3360 %10261
 org.apache.hadoop.mapred.FileInputFormat.getSplits(JobConf, int)
7,9281 %309261
 org.apache.hadoop.hive.ql.io.RCFileInputFormat.getRecordReader(InputSplit, JobConf, Reporter)
7,8091 %290261
 org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe.initialize(Configuration, Properties)
1,0430 %113522
 org.apache.hadoop.hive.ql.exec.FetchOperator.getRowInspectorFromPartition(PartitionDesc, ObjectInspector)
2260 %02261
 org.apache.hadoop.util.StringUtils.escapeString(String)
1200 %00261
 java.lang.Class.newInstance()
930 %00522
 org.apache.hadoop.hive.ql.io.RCFileRecordReader.createValue()
690 %00261
 org.apache.hadoop.hive.ql.io.RCFileRecordReader.createKey()
490 %00261
 org.apache.hadoop.conf.Configuration.set(String, String)
360 %00261
 org.apache.hadoop.fs.Path.toString()
70 %04261
 org.apache.hadoop.hive.ql.exec.FetchOperator$FetchInputFormatSplit.<init>(InputSplit, String)
30 %00261
 org.apache.hadoop.hive.ql.io.RCFileRecordReader.next(Object, Object)
1,5140 %50261
 org.apache.hadoop.hive.ql.exec.JoinUtil.computeKeys(Object, List, List)
340 %02261
 org.apache.hadoop.hive.ql.exec.Operator.process(Object, int)
50 %00261
 org.apache.hadoop.hive.ql.exec.DummyStoreOperator.reset()
50 %00261
 org.apache.hadoop.mapred.JobConf.<init>(Configuration)
3,8930 %140261
 org.apache.hadoop.hive.ql.exec.FetchOperator.setupContext(List)
8020 %31261
 org.apache.hadoop.hive.ql.exec.FetchOperator.<init>(FetchWork, JobConf)
180 %00261
 org.apache.hadoop.util.PriorityQueue.put(Object)
120 %00261
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator$MergeQueue.segmentsForSize(int)
90 %901
 java.util.Arrays.asList(Object[])
90 %09261
 java.lang.Integer.valueOf(int)
50 %05522
 org.apache.hadoop.hive.ql.exec.DefaultBucketMatcher.getAliasBucketFiles(String, String, String)
9960 %996181
 org.apache.hadoop.hive.ql.plan.BucketMapJoinContext.createFileId(String)
560 %5601
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator.joinOneGroup()
3,0440 %3,04401
 org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator.fetchNextGroup(Byte)
7750 %77521
 org.apache.hadoop.hive.ql.exec.persistence.RowContainer.add(List)
80 %08103
 org.apache.hadoop.hive.ql.exec.JoinUtil.computeKeys(Object, List, List)
20 %01103
 org.apache.commons.logging.impl.Log4JLogger.info(Object)
140 %403
 org.apache.hadoop.hive.ql.exec.Operator.cleanUpInputFileChanged()
570 %5701
 org.apache.hadoop.hive.serde2.columnar.ColumnarSerDeBase.deserialize(Writable)
160 %00105
  org.apache.hadoop.hive.ql.exec.Operator.getDone()
≥ 5940 %≈ 0≈ 594≥ 696,904
  org.apache.hadoop.hive.ql.exec.ExecMapperContext.resetRow()
≥ 2950 %≈ 0≈ 295≥ 380,001
 org.apache.hadoop.mapred.MapTask$TrackedRecordReader.next(Object, Object)
86,18711 %02,7851,093,648
 org.apache.hadoop.hive.ql.exec.ExecMapper.close()
410 %4101
 org.apache.hadoop.mapred.MapTask$TrackedRecordReader.<init>(MapTask, Task$TaskReporter, JobConf)
24,6223 %24,62201
 org.apache.hadoop.util.ReflectionUtils.newInstance(Class, Configuration)
1,2420 %1,24201
 org.apache.hadoop.mapred.MapTask.getSplitDetails(Path, long)
1,2100 %1,21061
 org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush()
2420 %24201
 org.apache.hadoop.mapred.MapTask.createSortingCollector(JobConf, Task$TaskReporter)
2360 %23601
 org.apache.hadoop.mapred.MapTask$OldOutputCollector.<init>(MapOutputCollector, JobConf)
80 %801
 org.apache.hadoop.mapred.JobConf.getMapRunnerClass()
80 %801
 java.lang.ClassLoader.loadClass(String)
70 %302
 org.apache.hadoop.fs.Path.<init>(String)
60 %601
 org.apache.hadoop.mapred.Task.statusUpdate(TaskUmbilicalProtocol)
20 %201
 org.apache.hadoop.mapred.MapTask.updateJobWithSplit(JobConf, InputSplit)
10 %101
 org.apache.hadoop.mapred.Task.initialize(JobConf, JobID, Reporter, boolean)
8180 %81811
 org.apache.hadoop.mapred.Task.done(TaskUmbilicalProtocol, Task$TaskReporter)
2170 %21701
 org.apache.hadoop.mapred.Task.startReporter(TaskUmbilicalProtocol)
50 %501
 org.apache.hadoop.fs.FileSystem.get(Configuration)
1,5920 %1,59201
 org.apache.hadoop.mapred.JobConf.getWorkingDirectory()
10 %101
 org.apache.hadoop.mapred.YarnChild$1.run()
4180 %41801
 javax.security.auth.Subject.createContext(Subject, AccessControlContext)
430 %2102
 org.apache.hadoop.mapred.YarnChild.configureTask(Task, Credentials, Token)
10,4831 %10,48311
 org.apache.hadoop.security.UserGroupInformation.setConfiguration(Configuration)
5,4771 %5,47701
 org.apache.hadoop.mapred.JobConf.<init>()
1,8200 %1,82001
 $Proxy9.getTask(JvmContext)
1,3350 %1,33501
 org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.initialize(String)
7620 %76201
 org.apache.hadoop.security.UserGroupInformation.<clinit>()
2310 %23101
 org.apache.hadoop.mapred.JobConf.<clinit>()
1400 %14001
 java.lang.ClassLoader.loadClass(String)
1190 %2047
 org.apache.hadoop.security.UserGroupInformation.getCurrentUser()
1130 %11301
 org.apache.commons.logging.impl.Log4JLogger.info(Object)
630 %2103
 org.apache.hadoop.mapred.TaskAttemptID.forName(String)
490 %4901
 org.apache.hadoop.metrics2.source.JvmMetrics.initSingleton(String, String)
100 %1001
 org.apache.hadoop.net.NetUtils.<clinit>()
90 %901
 org.apache.hadoop.util.StringUtils.camelize(String)
90 %901
 org.apache.log4j.LogManager.shutdown()
80 %801
 org.apache.hadoop.conf.Configuration.<clinit>()
60 %601
 org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.shutdown()
40 %401
 org.apache.hadoop.net.NetUtils.createSocketAddrForHost(String, int)
20 %201
 org.apache.hadoop.ipc.RPC.stopProxy(Object)
10 %101