Log Type: syslog
Log Upload Time: Tue Apr 28 15:10:57 +0530 2015
Log Length: 1428305
2015-04-28 15:10:26,613 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: JvmMetrics, JVM related metrics etc.
2015-04-28 15:10:26,624 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsSubmitted with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,632 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsCompleted with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,633 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsFailed with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,633 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsKilled with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,633 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsPreparing with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,633 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.jobsRunning with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,633 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsLaunched with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,634 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsCompleted with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,634 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsFailed with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[])
2015-04-28 15:10:26,634 DEBUG [main]
org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsKilled with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,634 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsRunning with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,634 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.mapsWaiting with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,635 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesLaunched with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,635 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesCompleted with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,635 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesFailed with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,636 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesKilled with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,636 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesRunning with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,636 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics.reducesWaiting with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:26,637 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MRAppMetrics, MR App Metrics 2015-04-28 15:10:26,650 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Created MRAppMaster for application appattempt_1430213948957_0001_000001 2015-04-28 15:10:26,744 DEBUG [main] org.apache.hadoop.util.Shell: setsid exited with exit code 0 2015-04-28 15:10:26,899 
DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)]) 2015-04-28 15:10:26,900 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)]) 2015-04-28 15:10:26,900 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[GetGroups]) 2015-04-28 15:10:26,901 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: UgiMetrics, User and group related metrics 2015-04-28 15:10:26,962 DEBUG [main] org.apache.hadoop.security.Groups: Creating new Groups object 2015-04-28 15:10:26,964 DEBUG [main] org.apache.hadoop.util.NativeCodeLoader: Trying to load the custom-built native-hadoop library... 2015-04-28 15:10:26,964 DEBUG [main] org.apache.hadoop.util.NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path 2015-04-28 15:10:26,964 DEBUG [main] org.apache.hadoop.util.NativeCodeLoader: java.library.path=/opt/bibin/dsperf/HADOP2_7/nmlocal/usercache/dsperf/appcache/application_1430213948957_0001/container_1430213948957_0001_01_000001:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib 2015-04-28 15:10:26,964 WARN [main] org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable 2015-04-28 15:10:26,965 DEBUG [main] org.apache.hadoop.util.PerformanceAdvisory: Falling back to shell based 2015-04-28 15:10:26,965 DEBUG [main] org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping 2015-04-28 15:10:26,996 DEBUG [main] org.apache.hadoop.security.Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000 2015-04-28 15:10:26,999 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: hadoop login 2015-04-28 15:10:26,999 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: hadoop login commit 2015-04-28 15:10:27,002 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: using local user:UnixPrincipal: dsperf 2015-04-28 15:10:27,002 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: Using user: "UnixPrincipal: dsperf" with name dsperf 2015-04-28 15:10:27,002 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: User entry: "dsperf" 2015-04-28 15:10:27,007 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: UGI loginUser:dsperf (auth:SIMPLE) 2015-04-28 15:10:27,008 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Executing with tokens: 2015-04-28 15:10:27,008 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Kind: YARN_AM_RM_TOKEN, Service: , Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } keyId: -524530517) 2015-04-28 15:10:27,098 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1493) 2015-04-28 15:10:27,099 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster entered state INITED 2015-04-28 15:10:27,115 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Using mapred newApiCommitter. 
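Editor's note: the WARN above ("Unable to load native-hadoop library ... using builtin-java classes where applicable") only means libhadoop was not found on java.library.path, so group lookup fell back to ShellBasedUnixGroupsMapping; the job still runs, just without native acceleration. Below is a minimal, hedged sketch of probing the same two things the AM logs here — whether native code loaded, and which user the UGI login resolved to. The APIs used (NativeCodeLoader.isNativeCodeLoaded, UserGroupInformation.getLoginUser) are public Hadoop 2.x classes; the wrapper class name is made up for illustration. From the command line, `hadoop checknative` reports the same native-library status.

```java
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.NativeCodeLoader;

public class NativeAndUgiProbe {  // hypothetical helper, not part of Hadoop
  public static void main(String[] args) throws Exception {
    // false here corresponds to the "Unable to load native-hadoop library" warning above
    System.out.println("native-hadoop loaded: " + NativeCodeLoader.isNativeCodeLoaded());
    System.out.println("java.library.path  : " + System.getProperty("java.library.path"));

    // With security off, the login resolves from the OS user, giving
    // output in the style of the "UGI loginUser:dsperf (auth:SIMPLE)" line above.
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
    System.out.println("UGI loginUser      : " + ugi);
  }
}
```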
2015-04-28 15:10:27,220 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,220 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,220 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,220 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,254 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,254 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,254 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,254 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,254 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,261 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,274 DEBUG [main] org.apache.hadoop.ipc.Server: rpcKind=RPC_PROTOCOL_BUFFER, rpcRequestWrapperClass=class org.apache.hadoop.ipc.ProtobufRpcEngine$RpcRequestWrapper, rpcInvoker=org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker@49b2a47d 2015-04-28 15:10:27,278 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,536 DEBUG [main] org.apache.hadoop.util.PerformanceAdvisory: Both short-circuit local reads and UNIX domain socket are disabled. 2015-04-28 15:10:27,542 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,557 DEBUG [main] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
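Editor's note: the BlockReaderLocal lines above are the DFSClient echoing its short-circuit-read settings (all disabled here, which is why it later reports "Both short-circuit local reads and UNIX domain socket are disabled"). The sketch below, under the assumption of a client with the same core-site/hdfs-site on its classpath, reads those keys and issues the kind of FileSystem call that produces the "Call: getFileInfo took Nms" entries that follow; the path is a placeholder, not taken from this job.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DfsClientConfigProbe {  // hypothetical helper class
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();  // picks up core-site.xml / hdfs-site.xml

    // The same keys the DFSClient logs at DEBUG on startup
    System.out.println("dfs.client.read.shortcircuit = "
        + conf.getBoolean("dfs.client.read.shortcircuit", false));
    System.out.println("dfs.domain.socket.path = "
        + conf.get("dfs.domain.socket.path", ""));

    // Each getFileStatus() becomes one ClientNamenodeProtocol getFileInfo RPC,
    // which is what the ProtobufRpcEngine "Call: getFileInfo took Nms" lines time.
    FileSystem fs = FileSystem.get(conf);  // e.g. the hdfs://hacluster default FS
    FileStatus st = fs.getFileStatus(new Path("/staging-dir"));
    System.out.println(st.getPath() + " length=" + st.getLen());
  }
}
```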
2015-04-28 15:10:27,557 DEBUG [main] org.apache.hadoop.ipc.Client: Connecting to /IP127:65110 2015-04-28 15:10:27,568 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:27,580 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:27,587 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"A7a/cFeuoUcoI6Nxhmo0XYEMS2CncnR/Z3YSLEcv\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:27,588 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:@org.apache.hadoop.security.token.TokenInfo(value=class org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector) 2015-04-28 15:10:27,588 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Use SIMPLE authentication for protocol ClientNamenodeProtocolPB 2015-04-28 15:10:27,589 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:27,593 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf: starting, having connections 1 2015-04-28 15:10:27,595 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #0 2015-04-28 15:10:27,598 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #0 2015-04-28 15:10:27,598 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 51ms 2015-04-28 15:10:27,670 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #1 2015-04-28 15:10:27,672 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #1 2015-04-28 15:10:27,672 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 3ms 2015-04-28 15:10:27,673 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #2 2015-04-28 15:10:27,674 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #2 2015-04-28 15:10:27,674 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,674 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #3 2015-04-28 15:10:27,676 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #3 2015-04-28 15:10:27,676 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,678 INFO [main] 
org.apache.hadoop.mapreduce.v2.app.MRAppMaster: OutputCommitter set in config null 2015-04-28 15:10:27,723 INFO [main] org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 2015-04-28 15:10:27,726 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter 2015-04-28 15:10:27,728 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service Dispatcher 2015-04-28 15:10:27,734 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.client.MRClientService entered state INITED 2015-04-28 15:10:27,736 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service CommitterEventHandler 2015-04-28 15:10:27,742 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service org.apache.hadoop.mapred.TaskAttemptListenerImpl 2015-04-28 15:10:27,749 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.jobhistory.EventType for class org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler 2015-04-28 15:10:27,750 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$JobEventDispatcher 2015-04-28 15:10:27,751 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$TaskEventDispatcher 2015-04-28 15:10:27,752 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$TaskAttemptEventDispatcher 2015-04-28 15:10:27,752 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventType for class org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler 2015-04-28 15:10:27,753 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.speculate.Speculator$EventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$SpeculatorEventDispatcher 2015-04-28 15:10:27,753 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService 2015-04-28 15:10:27,753 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter 2015-04-28 15:10:27,753 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator$EventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter 2015-04-28 15:10:27,754 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter 2015-04-28 15:10:27,754 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher$EventType for class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter 2015-04-28 15:10:27,754 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service JobHistoryEventHandler 2015-04-28 15:10:27,754 DEBUG [main] org.apache.hadoop.service.CompositeService: 
org.apache.hadoop.mapreduce.v2.app.MRAppMaster: initing services, size=7 2015-04-28 15:10:27,754 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: Dispatcher entered state INITED 2015-04-28 15:10:27,754 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: CommitterEventHandler entered state INITED 2015-04-28 15:10:27,755 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapred.TaskAttemptListenerImpl entered state INITED 2015-04-28 15:10:27,756 DEBUG [main] org.apache.hadoop.service.CompositeService: Adding service TaskHeartbeatHandler 2015-04-28 15:10:27,756 DEBUG [main] org.apache.hadoop.service.CompositeService: org.apache.hadoop.mapred.TaskAttemptListenerImpl: initing services, size=1 2015-04-28 15:10:27,756 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: TaskHeartbeatHandler entered state INITED 2015-04-28 15:10:27,756 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService entered state INITED 2015-04-28 15:10:27,756 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter entered state INITED 2015-04-28 15:10:27,757 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter entered state INITED 2015-04-28 15:10:27,757 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: JobHistoryEventHandler entered state INITED 2015-04-28 15:10:27,783 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,788 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,788 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,788 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,788 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,789 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,790 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,791 INFO [main] org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils: Default file system [hdfs://hacluster:8020] 2015-04-28 15:10:27,816 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: 
PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,817 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,817 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,817 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,817 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,818 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,819 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,819 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,819 INFO [main] org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils: Default file system [hdfs://hacluster:8020] 2015-04-28 15:10:27,844 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,845 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,845 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,845 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,845 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,846 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,846 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,846 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,846 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,847 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,847 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,847 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,847 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for 
dfs.data.transfer.protection 2015-04-28 15:10:27,847 INFO [main] org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils: Default file system [hdfs://hacluster:8020] 2015-04-28 15:10:27,848 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,848 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,848 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,848 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,849 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,850 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,851 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,851 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,852 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,852 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,852 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,852 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,853 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001 2015-04-28 15:10:27,853 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,854 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,855 DEBUG 
[IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #4 2015-04-28 15:10:27,857 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #4 2015-04-28 15:10:27,857 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,857 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,858 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,858 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,858 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,858 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,859 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,860 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,861 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #5 2015-04-28 15:10:27,863 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #5 2015-04-28 15:10:27,863 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,863 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331) 2015-04-28 15:10:27,864 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,864 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,864 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,864 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,865 DEBUG [main] org.apache.hadoop.hdfs.HAUtil: No HA service delegation token found for logical URI hdfs://hacluster 2015-04-28 15:10:27,866 DEBUG [main] 
org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false 2015-04-28 15:10:27,866 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.read.shortcircuit = false 2015-04-28 15:10:27,866 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.client.domain.socket.data.traffic = false 2015-04-28 15:10:27,866 DEBUG [main] org.apache.hadoop.hdfs.BlockReaderLocal: dfs.domain.socket.path = 2015-04-28 15:10:27,866 DEBUG [main] org.apache.hadoop.io.retry.RetryUtils: multipleLinearRandomRetry = null 2015-04-28 15:10:27,866 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:27,867 DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection 2015-04-28 15:10:27,868 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #6 2015-04-28 15:10:27,869 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #6 2015-04-28 15:10:27,869 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,870 DEBUG [main] org.apache.hadoop.hdfs.DFSClient: /staging-dir/history/done_intermediate/dsperf: masked=rwxr-x--- 2015-04-28 15:10:27,874 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #7 2015-04-28 15:10:27,907 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #7 2015-04-28 15:10:27,907 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: mkdirs took 33ms 2015-04-28 15:10:27,909 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #8 2015-04-28 15:10:27,911 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #8 2015-04-28 15:10:27,911 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:27,911 INFO [main] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Perms after creating 488, Expected: 504 2015-04-28 15:10:27,911 INFO [main] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Explicitly setting permissions to : 504, rwxrwx--- 2015-04-28 15:10:27,914 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #9 2015-04-28 15:10:27,932 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #9 2015-04-28 15:10:27,932 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: setPermission took 19ms 2015-04-28 15:10:27,934 INFO [main] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Emitting job history data to the timeline server is not enabled 2015-04-28 15:10:27,966 INFO [main] org.apache.hadoop.yarn.event.AsyncDispatcher: Registering class org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent$Type for 
class org.apache.hadoop.mapreduce.v2.app.MRAppMaster$JobFinishEventHandler 2015-04-28 15:10:28,125 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: from system property: null 2015-04-28 15:10:28,125 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: from environment variable: null 2015-04-28 15:10:28,149 DEBUG [main] org.apache.commons.configuration.ConfigurationUtils: ConfigurationUtils.locate(): base is null, name is hadoop-metrics2-mrappmaster.properties 2015-04-28 15:10:28,152 DEBUG [main] org.apache.commons.configuration.ConfigurationUtils: ConfigurationUtils.locate(): base is null, name is hadoop-metrics2.properties 2015-04-28 15:10:28,152 DEBUG [main] org.apache.commons.configuration.ConfigurationUtils: Loading configuration from the context classpath (hadoop-metrics2.properties) 2015-04-28 15:10:28,176 INFO [main] org.apache.hadoop.metrics2.impl.MetricsConfig: loaded properties from hadoop-metrics2.properties 2015-04-28 15:10:28,178 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: *.sink.file.class = org.apache.hadoop.metrics2.sink.FileSink *.period = 10 2015-04-28 15:10:28,178 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: 2015-04-28 15:10:28,182 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: period 2015-04-28 15:10:28,187 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableStat org.apache.hadoop.metrics2.impl.MetricsSystemImpl.snapshotStat with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Snapshot, Snapshot stats]) 2015-04-28 15:10:28,188 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableStat org.apache.hadoop.metrics2.impl.MetricsSystemImpl.publishStat with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Publish, Publishing stats]) 2015-04-28 15:10:28,188 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.metrics2.impl.MetricsSystemImpl.droppedPubAll with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Dropped updates by all sinks]) 2015-04-28 15:10:28,192 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,192 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,192 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,255 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=10 2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 
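Editor's note: a few entries back, the JobHistoryEventHandler reports "Perms after creating 488, Expected: 504" and then sets permissions explicitly. 488 and 504 are simply decimal renderings of octal 0750 and 0770 (rwxrwx---): the intermediate-done directory came out more restrictive than requested because of the filesystem umask, so the handler fixes it up. A hedged sketch of the equivalent mkdir-then-setPermission sequence, using standard FileSystem/FsPermission APIs (the path mirrors the one in the log, but the class is illustrative):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class HistoryDirPerms {  // hypothetical helper class
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path dir = new Path("/staging-dir/history/done_intermediate/dsperf");

    // mkdirs() applies the requested mode filtered through fs.permissions.umask-mode,
    // which is how the directory can end up 0750 (decimal 488) instead of 0770 (504).
    FsPermission wanted = new FsPermission((short) 0770);  // rwxrwx---
    fs.mkdirs(dir, wanted);
    if (!fs.getFileStatus(dir).getPermission().equals(wanted)) {
      fs.setPermission(dir, wanted);  // the "Explicitly setting permissions" step in the log
    }
  }
}
```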
2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of active metrics sources, name=NumActiveSources, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of all registered metrics sources, name=NumAllSources, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of active metrics sinks, name=NumActiveSinks, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of all registered metrics sinks, name=NumAllSinks, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for snapshot stats, name=SnapshotNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for snapshot stats, name=SnapshotAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for publishing stats, name=PublishNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for publishing stats, name=PublishAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Dropped updates by all sinks, name=DroppedPubAll, type=java.lang.Long, read-only, descriptor={}]] 2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=MetricsSystem,sub=Stats 2015-04-28 15:10:28,256 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source MetricsSystem,sub=Stats registered. 2015-04-28 15:10:28,258 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Scheduled snapshot period at 10 second(s). 2015-04-28 15:10:28,258 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MRAppMaster metrics system started 2015-04-28 15:10:28,258 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,258 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,258 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,260 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,260 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=27 2015-04-28 15:10:28,260 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 
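Editor's note: the MetricsConfig lookups above (hadoop-metrics2-mrappmaster.properties, then hadoop-metrics2.properties) and the "MRAppMaster metrics system started" line are the standard metrics2 startup, and the @Metric field scans at the top of this log are what register() does with an annotated source. A minimal sketch of that pattern is below, with a made-up ExampleSource standing in for MRAppMetrics; the annotation and registration APIs are real Hadoop metrics2 classes.

```java
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableCounterInt;
import org.apache.hadoop.metrics2.lib.MutableGaugeInt;

@Metrics(about = "Example source", context = "mapred")  // hypothetical source, not MRAppMetrics
public class ExampleSource {
  @Metric MutableCounterInt jobsSubmitted;  // counter field, like MRAppMetrics.jobsSubmitted
  @Metric MutableGaugeInt jobsRunning;      // gauge field, like MRAppMetrics.jobsRunning

  public static void main(String[] args) {
    // initialize("MRAppMaster") triggers the hadoop-metrics2-mrappmaster.properties lookup
    // and the "metrics system started" line; register() runs the @Metric field scan
    // logged by MutableMetricsFactory at the top of this syslog.
    MetricsSystem ms = DefaultMetricsSystem.initialize("MRAppMaster");
    ExampleSource src = ms.register(new ExampleSource());
    src.jobsSubmitted.incr();
    src.jobsRunning.incr();
  }
}
```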
2015-04-28 15:10:28,260 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Process name, name=tag.ProcessName, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Session ID, name=tag.SessionId, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Non-heap memory used in MB, name=MemNonHeapUsedM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Non-heap memory committed in MB, name=MemNonHeapCommittedM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Non-heap memory max in MB, name=MemNonHeapMaxM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Heap memory used in MB, name=MemHeapUsedM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Heap memory committed in MB, name=MemHeapCommittedM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Heap memory max in MB, name=MemHeapMaxM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Max memory size in MB, name=MemMaxM, type=java.lang.Float, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=GC Count for PS Scavenge, name=GcCountPS Scavenge, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=GC Time for PS Scavenge, name=GcTimeMillisPS Scavenge, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=GC Count for PS MarkSweep, name=GcCountPS MarkSweep, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=GC Time for PS MarkSweep, name=GcTimeMillisPS MarkSweep, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Total GC count, name=GcCount, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Total GC time in milliseconds, name=GcTimeMillis, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of new threads, name=ThreadsNew, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of runnable threads, name=ThreadsRunnable, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of blocked threads, name=ThreadsBlocked, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of waiting threads, name=ThreadsWaiting, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of timed waiting threads, name=ThreadsTimedWaiting, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of terminated threads, name=ThreadsTerminated, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Total number of fatal log events, name=LogFatal, type=java.lang.Long, read-only, descriptor={}], 
javax.management.MBeanAttributeInfo[description=Total number of error log events, name=LogError, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Total number of warning log events, name=LogWarn, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Total number of info log events, name=LogInfo, type=java.lang.Long, read-only, descriptor={}]] 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=JvmMetrics 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source JvmMetrics registered. 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source JvmMetrics 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=8 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 2015-04-28 15:10:28,261 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for rate of successful kerberos logins and latency (milliseconds), name=LoginSuccessNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for rate of successful kerberos logins and latency (milliseconds), name=LoginSuccessAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for rate of failed kerberos logins and latency (milliseconds), name=LoginFailureNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for rate of failed kerberos logins and latency (milliseconds), name=LoginFailureAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for getGroups, name=GetGroupsNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for getGroups, name=GetGroupsAvgTime, type=java.lang.Double, read-only, descriptor={}]] 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=UgiMetrics 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source UgiMetrics registered. 
2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source UgiMetrics 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,262 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=20 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsSubmitted, name=JobsSubmitted, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsCompleted, name=JobsCompleted, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsFailed, name=JobsFailed, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsKilled, name=JobsKilled, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsPreparing, name=JobsPreparing, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=JobsRunning, name=JobsRunning, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsLaunched, name=MapsLaunched, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsCompleted, name=MapsCompleted, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsFailed, name=MapsFailed, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsKilled, name=MapsKilled, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsRunning, name=MapsRunning, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=MapsWaiting, name=MapsWaiting, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=ReducesLaunched, name=ReducesLaunched, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=ReducesCompleted, name=ReducesCompleted, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=ReducesFailed, name=ReducesFailed, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=ReducesKilled, name=ReducesKilled, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=ReducesRunning, name=ReducesRunning, type=java.lang.Integer, read-only, descriptor={}], 
javax.management.MBeanAttributeInfo[description=ReducesWaiting, name=ReducesWaiting, type=java.lang.Integer, read-only, descriptor={}]] 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=MRAppMetrics 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source MRAppMetrics registered. 2015-04-28 15:10:28,263 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source MRAppMetrics 2015-04-28 15:10:28,265 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=MetricsSystem,sub=Control 2015-04-28 15:10:28,265 DEBUG [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_INIT 2015-04-28 15:10:28,266 DEBUG [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: startJobs: parent=/staging-dir/dsperf/.staging child=job_1430213948957_0001 2015-04-28 15:10:28,268 INFO [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Adding job token for job_1430213948957_0001 to jobTokenSecretManager 2015-04-28 15:10:28,279 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #10 2015-04-28 15:10:28,280 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #10 2015-04-28 15:10:28,280 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:28,286 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #11 2015-04-28 15:10:28,288 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #11 2015-04-28 15:10:28,289 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getBlockLocations took 3ms 2015-04-28 15:10:28,313 DEBUG [main] org.apache.hadoop.hdfs.DFSClient: newInfo = LocatedBlocks{ fileLength=663 underConstruction=false blocks=[LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741843_1019; getBlockSize()=663; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]}] lastLocatedBlock=LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741843_1019; getBlockSize()=663; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]} isLastBlockComplete=true} 2015-04-28 15:10:28,317 DEBUG [main] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:28,325 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #12 2015-04-28 15:10:28,326 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #12 2015-04-28 15:10:28,326 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getServerDefaults took 2ms 2015-04-28 15:10:28,333 
DEBUG [main] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:28,390 INFO [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Not uberizing job_1430213948957_0001 because: not enabled; too many maps; too much CPU; 2015-04-28 15:10:28,413 INFO [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Input size for job job_1430213948957_0001 = 1888. Number of splits = 16 2015-04-28 15:10:28,414 INFO [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Number of reduces for job job_1430213948957_0001 = 1 2015-04-28 15:10:28,414 INFO [main] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: job_1430213948957_0001Job Transitioned from NEW to INITED 2015-04-28 15:10:28,415 INFO [main] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: MRAppMaster launching normal, non-uberized, multi-container job job_1430213948957_0001. 2015-04-28 15:10:28,416 DEBUG [main] org.apache.hadoop.yarn.ipc.YarnRPC: Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 2015-04-28 15:10:28,416 DEBUG [main] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc server for protocol interface org.apache.hadoop.mapreduce.v2.api.MRClientProtocol with 1 handlers 2015-04-28 15:10:28,448 INFO [main] org.apache.hadoop.ipc.CallQueueManager: Using callQueue class java.util.concurrent.LinkedBlockingQueue 2015-04-28 15:10:28,448 DEBUG [main] org.apache.hadoop.ipc.Server: TOKEN authentication enabled for secret manager 2015-04-28 15:10:28,448 DEBUG [main] org.apache.hadoop.ipc.Server: Server accepts auth methods:[TOKEN, SIMPLE] 2015-04-28 15:10:28,468 INFO [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: Starting Socket Reader #1 for port 48332 2015-04-28 15:10:28,471 DEBUG [main] org.apache.hadoop.ipc.metrics.RpcMetrics: Initialized MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=48332}], metrics=[]} 2015-04-28 15:10:28,472 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of received bytes]) 2015-04-28 15:10:28,472 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of sent bytes]) 2015-04-28 15:10:28,472 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Queue time]) 2015-04-28 15:10:28,472 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, 
valueName=Time, value=[Processsing time]) 2015-04-28 15:10:28,473 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authentication failures]) 2015-04-28 15:10:28,473 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authentication successes]) 2015-04-28 15:10:28,473 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authorization failures]) 2015-04-28 15:10:28,473 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authorization sucesses]) 2015-04-28 15:10:28,474 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of open connections]) 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Length of the call queue]) 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: RpcActivityForPort48332, Aggregate RPC metrics 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,476 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=15 2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 
2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=RPC port, name=tag.port, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of received bytes, name=ReceivedBytes, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of sent bytes, name=SentBytes, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for queue time, name=RpcQueueTimeNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for queue time, name=RpcQueueTimeAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for processsing time, name=RpcProcessingTimeNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for processsing time, name=RpcProcessingTimeAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authentication failures, name=RpcAuthenticationFailures, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authentication successes, name=RpcAuthenticationSuccesses, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authorization failures, name=RpcAuthorizationFailures, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authorization sucesses, name=RpcAuthorizationSuccesses, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of open connections, name=NumOpenConnections, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Length of the call queue, name=CallQueueLength, type=java.lang.Integer, read-only, descriptor={}]] 2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=RpcActivityForPort48332 2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source RpcActivityForPort48332 registered. 
2015-04-28 15:10:28,477 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source RpcActivityForPort48332 2015-04-28 15:10:28,478 DEBUG [main] org.apache.hadoop.ipc.metrics.RpcDetailedMetrics: MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed} 2015-04-28 15:10:28,479 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRates org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: RpcDetailedActivityForPort48332, Per method RPC metrics 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=3 2015-04-28 15:10:28,484 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 2015-04-28 15:10:28,485 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=RPC port, name=tag.port, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}]] 2015-04-28 15:10:28,486 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:28,486 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=RpcDetailedActivityForPort48332 2015-04-28 15:10:28,486 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source RpcDetailedActivityForPort48332 registered. 
2015-04-28 15:10:28,486 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source RpcDetailedActivityForPort48332 2015-04-28 15:10:28,495 DEBUG [main] org.apache.hadoop.ipc.Server: RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB 2015-04-28 15:10:28,495 DEBUG [main] org.apache.hadoop.ipc.Server: RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB version=1 ProtocolImpl=org.apache.hadoop.yarn.proto.MRClientProtocol$MRClientProtocolService$2 protocolClass=org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB 2015-04-28 15:10:28,495 INFO [main] org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB to the server 2015-04-28 15:10:28,495 DEBUG [main] org.apache.hadoop.ipc.Server: RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB version=1 ProtocolImpl=org.apache.hadoop.yarn.proto.MRClientProtocol$MRClientProtocolService$2 protocolClass=org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB 2015-04-28 15:10:28,496 INFO [IPC Server Responder] org.apache.hadoop.ipc.Server: IPC Server Responder: starting 2015-04-28 15:10:28,496 INFO [IPC Server listener on 48332] org.apache.hadoop.ipc.Server: IPC Server listener on 48332: starting 2015-04-28 15:10:28,497 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: starting 2015-04-28 15:10:28,497 INFO [main] org.apache.hadoop.mapreduce.v2.app.client.MRClientService: Instantiated MRClientService at host-IP143/IP143:48332 2015-04-28 15:10:28,561 INFO [main] org.mortbay.log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog 2015-04-28 15:10:28,561 DEBUG [main] org.mortbay.log: filterNameMap=null 2015-04-28 15:10:28,562 DEBUG [main] org.mortbay.log: pathFilters=null 2015-04-28 15:10:28,562 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,562 DEBUG [main] org.mortbay.log: servletPathMap={/=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,562 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,564 DEBUG [main] org.mortbay.log: filterNameMap={NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,564 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15)] 2015-04-28 15:10:28,564 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,564 DEBUG [main] org.mortbay.log: servletPathMap={/=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,564 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,569 INFO [main] org.apache.hadoop.security.authentication.server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
2015-04-28 15:10:28,571 DEBUG [main] org.mortbay.log: Container Server@4e2c95ee + org.mortbay.thread.QueuedThreadPool@7180e701 as threadpool 2015-04-28 15:10:28,573 INFO [main] org.apache.hadoop.http.HttpRequestLog: Http request log for http.requests.mapreduce is not defined 2015-04-28 15:10:28,573 DEBUG [main] org.mortbay.log: Container Server@4e2c95ee + ContextHandlerCollection@7ff2b8d2 as handler 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container ContextHandlerCollection@7ff2b8d2 + org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} as handler 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + NoCacheFilter as filter 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=NoCacheFilter,[/*],[],15) as filterMapping 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + org.mortbay.jetty.servlet.DefaultServlet-1596009860 as servlet 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=org.mortbay.jetty.servlet.DefaultServlet-1596009860,[/]) as servletMapping 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container SecurityHandler@2fb5fe30 + ServletHandler@6e92c6ad as handler 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container SessionHandler@456be73c + SecurityHandler@2fb5fe30 as handler 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container SessionHandler@456be73c + org.mortbay.jetty.servlet.HashSessionManager@2375b321 as sessionManager 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} + SessionHandler@456be73c as handler 2015-04-28 15:10:28,574 DEBUG [main] org.mortbay.log: Container org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} + ErrorPageErrorHandler@5baaae4c as error 2015-04-28 15:10:28,575 DEBUG [main] org.mortbay.log: Container ContextHandlerCollection@7ff2b8d2 + org.mortbay.jetty.servlet.Context@5b6e8f77{/static,null} as handler 2015-04-28 15:10:28,575 DEBUG [main] org.mortbay.log: Container org.mortbay.jetty.servlet.Context@5b6e8f77{/static,null} + ServletHandler@41a6d121 as handler 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + org.mortbay.jetty.servlet.DefaultServlet-809822663 as servlet 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + (S=org.mortbay.jetty.servlet.DefaultServlet-809822663,[/*]) as servletMapping 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: filterNameMap=null 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: pathFilters=null 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: servletPathMap={/*=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-809822663=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,580 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + safety as filter 2015-04-28 15:10:28,581 DEBUG [main] 
org.mortbay.log: Container ServletHandler@6e92c6ad + (F=safety,[/*],[],15) as filterMapping 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15)] 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletPathMap={/=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + safety as filter 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + (F=safety,[/*],[],15) as filterMapping 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety} 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: pathFilters=[(F=safety,[/*],[],15)] 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletPathMap={/*=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,581 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-809822663=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,581 INFO [main] org.apache.hadoop.http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter) 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + AM_PROXY_FILTER as filter 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15) as filterMapping 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15)] 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: servletPathMap={/=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,584 INFO [main] org.apache.hadoop.http.HttpServer2: Added filter AM_PROXY_FILTER (class=org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter) to context mapreduce 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + AM_PROXY_FILTER as filter 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: Container ServletHandler@41a6d121 + (F=AM_PROXY_FILTER,[/*],[],15) as filterMapping 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER} 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: pathFilters=[(F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[/*],[],15)] 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: servletPathMap={/*=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,584 DEBUG [main] org.mortbay.log: 
servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-809822663=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,584 INFO [main] org.apache.hadoop.http.HttpServer2: Added filter AM_PROXY_FILTER (class=org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter) to context static 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + stacks as servlet 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=stacks,[/stacks]) as servletMapping 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15)] 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletNameMap={stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/stacks],[],15) as filterMapping 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15)] 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,585 DEBUG [main] org.mortbay.log: servletNameMap={stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + logLevel as servlet 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=logLevel,[/logLevel]) as servletMapping 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15)] 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /logLevel=logLevel, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/logLevel],[],15) as filterMapping 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: 
pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15)] 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /logLevel=logLevel, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,586 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + metrics as servlet 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=metrics,[/metrics]) as servletMapping 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15)] 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/metrics],[],15) as filterMapping 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15)] 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletPathMap={/stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,587 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + jmx as servlet 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=jmx,[/jmx]) as servletMapping 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15)] 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,588 DEBUG [main] 
org.mortbay.log: servletPathMap={/jmx=jmx, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/jmx],[],15) as filterMapping 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15)] 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,588 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + conf as servlet 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (S=conf,[/conf]) as servletMapping 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15)] 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/conf],[],15) as filterMapping 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15), (F=AM_PROXY_FILTER,[/conf],[],15)] 2015-04-28 15:10:28,589 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, 
/=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,590 INFO [main] org.apache.hadoop.http.HttpServer2: adding path spec: /mapreduce/* 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/mapreduce/*],[],15) as filterMapping 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15), (F=AM_PROXY_FILTER,[/conf],[],15), (F=AM_PROXY_FILTER,[/mapreduce/*],[],15)] 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,590 INFO [main] org.apache.hadoop.http.HttpServer2: adding path spec: /ws/* 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=AM_PROXY_FILTER,[/ws/*],[],15) as filterMapping 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter} 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15), (F=AM_PROXY_FILTER,[/conf],[],15), (F=AM_PROXY_FILTER,[/mapreduce/*],[],15), (F=AM_PROXY_FILTER,[/ws/*],[],15)] 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,590 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,599 DEBUG [main] org.mortbay.log: Container Server@4e2c95ee + HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:0 as connector 2015-04-28 15:10:28,599 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + guice as filter 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: Container ServletHandler@6e92c6ad + (F=guice,[/*],[],15) as filterMapping 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter, guice=guice} 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: 
pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15), (F=AM_PROXY_FILTER,[/conf],[],15), (F=AM_PROXY_FILTER,[/mapreduce/*],[],15), (F=AM_PROXY_FILTER,[/ws/*],[],15), (F=guice,[/*],[],15)] 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,600 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,600 INFO [main] org.apache.hadoop.http.HttpServer2: Jetty bound to port 13644 2015-04-28 15:10:28,600 INFO [main] org.mortbay.log: jetty-6.1.26 2015-04-28 15:10:28,607 DEBUG [main] org.mortbay.log: started org.mortbay.thread.QueuedThreadPool@7180e701 2015-04-28 15:10:28,624 DEBUG [main] org.mortbay.log: Thread Context class loader is: ContextLoader@mapreduce([]) / sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,624 DEBUG [main] org.mortbay.log: Parent class loader is: sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,624 DEBUG [main] org.mortbay.log: Parent class loader is: sun.misc.Launcher$ExtClassLoader@7cbd213e 2015-04-28 15:10:28,625 DEBUG [main] org.mortbay.log: Try webapp=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce, exists=true, directory=true 2015-04-28 15:10:28,626 DEBUG [main] org.mortbay.log: Created temp dir /tmp/Jetty_0_0_0_0_13644_mapreduce____.xeeumz for org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} 2015-04-28 15:10:28,626 INFO [main] org.mortbay.log: Extract jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce to /tmp/Jetty_0_0_0_0_13644_mapreduce____.xeeumz/webapp 2015-04-28 15:10:28,626 DEBUG [main] org.mortbay.log: Extract jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce to /tmp/Jetty_0_0_0_0_13644_mapreduce____.xeeumz/webapp 2015-04-28 15:10:28,626 DEBUG [main] org.mortbay.log: Extracting entry = webapps/mapreduce from jar file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar 2015-04-28 15:10:28,627 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/services/ 2015-04-28 15:10:28,627 DEBUG [main] org.mortbay.log: Skipping entry: webapps/ 2015-04-28 15:10:28,627 DEBUG [main] org.mortbay.log: Skipping entry: webapps/proxy/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/node/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/yarn/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/applicationhistory/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/cluster/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/test/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: 
Skipping entry: webapps/jobhistory/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jt/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/js/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/css/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/ 2015-04-28 15:10:28,628 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/security/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/service/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/client/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/admin/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/timeline/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/util/resource/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ipc/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/pb/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/ 2015-04-28 15:10:28,629 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/util/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factory/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factory/providers/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/sharedcache/ 2015-04-28 15:10:28,630 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/services/org.apache.hadoop.security.token.TokenRenewer 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: 
META-INF/services/org.apache.hadoop.security.SecurityInfo 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: yarn-version-info.properties 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/proxy/.keep 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/node/.keep 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/yarn/.keep 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/applicationhistory/.keep 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/cluster/.keep 2015-04-28 15:10:28,631 DEBUG [main] org.mortbay.log: Skipping entry: webapps/test/.keep 2015-04-28 15:10:28,635 DEBUG [main] org.mortbay.log: Skipping entry: webapps/jobhistory/.keep 2015-04-28 15:10:28,636 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/yarn.dt.plugins.js 2015-04-28 15:10:28,636 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jt/jquery.jstree.js.gz 2015-04-28 15:10:28,636 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/yarn.css 2015-04-28 15:10:28,636 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/back_disabled.jpg 2015-04-28 15:10:28,636 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/forward_enabled.jpg 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/sort_desc.png 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/forward_disabled.jpg 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/sort_asc.png 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/sort_desc_disabled.png 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/sort_asc_disabled.png 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/back_enabled.jpg 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/favicon.ico 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/Sorting icons.psd 2015-04-28 15:10:28,637 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/images/sort_both.png 2015-04-28 15:10:28,638 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/js/jquery.dataTables.min.js.gz 2015-04-28 15:10:28,638 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/css/demo_table.css 2015-04-28 15:10:28,638 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/css/demo_page.css 2015-04-28 15:10:28,638 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/dt-1.9.4/css/jui-dt.css 2015-04-28 15:10:28,638 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/jquery-ui-1.9.1.custom.min.js.gz 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_glass_65_ffffff_1x400.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_glass_75_dadada_1x400.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_glass_95_fef1ec_1x400.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: 
webapps/static/jquery/themes-1.9.1/base/images/ui-bg_glass_75_e6e6e6_1x400.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_highlight-soft_75_cccccc_1x100.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_flat_0_aaaaaa_40x100.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-icons_222222_256x240.png 2015-04-28 15:10:28,639 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_glass_55_fbf9ee_1x400.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-icons_454545_256x240.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-icons_888888_256x240.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-icons_cd0a0a_256x240.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-bg_flat_75_ffffff_40x100.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/images/ui-icons_2e83ff_256x240.png 2015-04-28 15:10:28,640 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/themes-1.9.1/base/jquery-ui.css 2015-04-28 15:10:28,641 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/jquery/jquery-1.8.2.min.js.gz 2015-04-28 15:10:28,641 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/hadoop-st.png 2015-04-28 15:10:28,642 DEBUG [main] org.mortbay.log: Skipping entry: webapps/static/busy.gif 2015-04-28 15:10:28,642 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/package-info.class 2015-04-28 15:10:28,642 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$NMTokenIdentifierProto$Builder.class 2015-04-28 15:10:28,642 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ContainerTokenIdentifierProto$Builder.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ClientToAMTokenIdentifierProtoOrBuilder.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ContainerTokenIdentifierProto$1.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$YARNDelegationTokenIdentifierProto$1.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$YARNDelegationTokenIdentifierProtoOrBuilder.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$AMRMTokenIdentifierProtoOrBuilder.class 2015-04-28 15:10:28,643 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ClientToAMTokenIdentifierProto.class 2015-04-28 15:10:28,644 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ClientToAMTokenIdentifierProto$Builder.class 2015-04-28 15:10:28,644 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ContainerTokenIdentifierProtoOrBuilder.class 2015-04-28 15:10:28,644 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ContainerTokenIdentifierProto.class 2015-04-28 15:10:28,644 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$ClientToAMTokenIdentifierProto$1.class 2015-04-28 15:10:28,644 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$1.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$AMRMTokenIdentifierProto$1.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$NMTokenIdentifierProtoOrBuilder.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$NMTokenIdentifierProto$1.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$AMRMTokenIdentifierProto$Builder.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$NMTokenIdentifierProto.class 2015-04-28 15:10:28,645 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$AMRMTokenIdentifierProto.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$YARNDelegationTokenIdentifierProto.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/proto/YarnSecurityTokenProtos$YARNDelegationTokenIdentifierProto$Builder.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$Transition.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/package-info.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/InvalidStateTransitonException.class 2015-04-28 15:10:28,646 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$MultipleInternalArc.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$SingleInternalArc.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/Graph$Edge.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$ApplicableSingleOrMultipleTransition.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$TransitionsListNode.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/Graph$Node.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$ApplicableTransition.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/state/VisualizeStateMachine.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/Graph.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/MultipleArcTransition.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachine.class 2015-04-28 15:10:28,647 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/StateMachineFactory$InternalStateMachine.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/state/SingleArcTransition.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ContainerLogAppender.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/security/package-info.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/security/ApplicationACLsManager.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsRequestPBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/package-info.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshUserToGroupsMappingsRequestPBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshNodesRequestPBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshNodesResponsePBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/UpdateNodeResourceRequestPBImpl$1.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshSuperUserGroupsConfigurationRequestPBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshQueuesRequestPBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeResponsePBImpl.class 2015-04-28 15:10:28,648 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RunSharedCacheCleanerTaskResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshAdminAclsResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RunSharedCacheCleanerTaskRequestPBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsRequestPBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshServiceAclsResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl$1$1.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/UpdateNodeResourceRequestPBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshQueuesResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/UpdateNodeResourceRequestPBImpl$1$1.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl$1.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/UpdateNodeResourceResponsePBImpl.class 2015-04-28 15:10:28,649 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshAdminAclsRequestPBImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshServiceAclsRequestPBImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshUserToGroupsMappingsResponsePBImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RefreshSuperUserGroupsConfigurationResponsePBImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocolPB.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/service/SCMAdminProtocolPBServiceImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceManagerAdministrationProtocolPBServiceImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.class 2015-04-28 15:10:28,650 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/server/api/impl/pb/client/SCMAdminProtocolPBClientImpl.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/package-info.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/YarnAuthorizationProvider.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/AccessType.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/PrivilegedEntity.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/security/ContainerTokenIdentifier$Renewer.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/AMRMTokenIdentifier.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ContainerManagerSecurityInfo.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/AdminACLsManager.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/AMRMTokenSelector.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/PrivilegedEntity$EntityType.class 2015-04-28 15:10:28,651 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ContainerTokenIdentifier.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/admin/package-info.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/admin/AdminSecurityInfo$1.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/admin/AdminSecurityInfo.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/SchedulerSecurityInfo.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/package-info.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/YARNDelegationTokenIdentifier.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/TimelineDelegationTokenIdentifier$Renewer.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientToAMTokenSecretManager.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo$2.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientToAMTokenSelector.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo$1.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier$Renewer.class 2015-04-28 15:10:28,652 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/TimelineDelegationTokenIdentifier.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo$1.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo$2.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/RMDelegationTokenSelector.class 2015-04-28 15:10:28,653 DEBUG [main] 
org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientToAMTokenIdentifier.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/TimelineDelegationTokenOperation.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/TimelineAuthenticationConsts.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/client/BaseClientToAMTokenSecretManager.class 2015-04-28 15:10:28,653 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ContainerTokenSelector.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/ContainerManagerSecurityInfo$1.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/NMTokenSelector.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/NMTokenIdentifier.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/SchedulerSecurityInfo$1.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/security/AMRMTokenIdentifier$Renewer.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager$NodeLabelUpdateOperation.class 2015-04-28 15:10:28,654 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager$1.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager$ForwardingEventHandler.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager$Host.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager$Node.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/UpdateNodeToLabelsMappingsEvent.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/RemoveClusterNodeLabels.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/StoreNewClusterNodeLabels.class 2015-04-28 15:10:28,655 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEventType.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEvent.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore$1.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/NodeLabel.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore$SerializedLogType.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/AuxiliaryServiceHelper.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/package-info.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/Times.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/RMHAUtils.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/SystemClock.class 2015-04-28 15:10:28,656 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/RackResolver.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/StringHelper.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/Times$1.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/WindowsBasedProcessTree$ProcessInfo.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/FSDownload$1.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/Apps.class 2015-04-28 15:10:28,657 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ConverterUtils.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/AbstractLivelinessMonitor$PingChecker.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/AbstractLivelinessMonitor$1.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree$1.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/timeline/package-info.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/timeline/TimelineUtils.class 2015-04-28 15:10:28,658 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/UTCClock.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/YarnVersionInfo.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/Clock.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree$ProcessInfo.class 
2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/FSDownload$3.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree$ProcessSmapMemoryInfo.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ApplicationClassLoader.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/TrackingUriPlugin.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree$MemInfo.class 2015-04-28 15:10:28,659 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/DefaultResourceCalculator.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/DominantResourceCalculator.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/Resources$2.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/ResourceCalculator.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/Resources.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/resource/Resources$1.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/FSDownload.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/CpuTimeTracker.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/FSDownload$4.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/ProcfsBasedProcessTree$ProcessTreeSmapMemInfo.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/WindowsResourceCalculatorPlugin.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/WindowsBasedProcessTree.class 2015-04-28 15:10:28,660 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/util/FSDownload$2.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ContainerRollingLogAppender.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ipc/package-info.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ipc/YarnRPC.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/ipc/RPCUtil.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/YarnUncaughtExceptionHandler.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/AHSProxy$1.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ConfiguredRMFailoverProxyProvider.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/NMProxy.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/RMHAServiceTarget.class 2015-04-28 15:10:28,661 
DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ClientRMProxy.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/RMFailoverProxyProvider.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/RMProxy.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/package-info.class 2015-04-28 15:10:28,661 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/TimelineClient.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/package-info.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$TimelineClientConnectionRetry.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$TimelineClientRetryOp.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$TimelineURLConnectionFactory.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$5.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$4.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$TimelineJerseyRetryFilter$1.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$2.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$3.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$6.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$TimelineJerseyRetryFilter.class 2015-04-28 15:10:28,662 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$7.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl$1.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ClientRMProxy$ClientRMProtocols.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ServerProxy.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/ServerProxy$1.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/RMProxy$1.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/client/AHSProxy.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/LocalConfigurationProvider.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/package-info.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/factories/RpcServerFactory.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/pb/package-info.class 2015-04-28 15:10:28,663 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factories/RpcClientFactory.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/ApplicationClientProtocolPB.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptsRequestPBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/package-info.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetDelegationTokenResponsePBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RenewDelegationTokenResponsePBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerReportRequestPBImpl.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$4.class 2015-04-28 15:10:28,664 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$4$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReleaseSharedCacheResourceRequestPBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptsResponsePBImpl$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsResponsePBImpl$1$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$2$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersResponsePBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$6.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$5.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl$1$1.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterMetricsRequestPBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/UseSharedCacheResourceRequestPBImpl.class 2015-04-28 15:10:28,665 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsResponsePBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationUpdateRequestPBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainersResponsePBImpl$1.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersResponsePBImpl$1.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesResponsePBImpl$1.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportResponsePBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesRequestPBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersResponsePBImpl$1$1.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/SubmitApplicationRequestPBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodeLabelsResponsePBImpl.class 2015-04-28 15:10:28,666 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainerRequestPBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/SubmitApplicationResponsePBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$3.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RenewDelegationTokenRequestPBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/MoveApplicationAcrossQueuesResponsePBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$2$1.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$1.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$1.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersRequestPBImpl.class 2015-04-28 15:10:28,667 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterMetricsResponsePBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetDelegationTokenRequestPBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerReportResponsePBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptReportResponsePBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$3$1.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueInfoResponsePBImpl.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainersResponsePBImpl$1.class 2015-04-28 15:10:28,668 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterResponsePBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationDeleteResponsePBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainersRequestPBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersResponsePBImpl$2.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReleaseSharedCacheResourceResponsePBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$1$1.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetLabelsToNodesResponsePBImpl$1.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationResponsePBImpl.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$2.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl$1$1.class 2015-04-28 15:10:28,669 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueUserAclsInfoResponsePBImpl.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl$1.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$1$1.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$3.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$1$1.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$1.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$2$1.class 2015-04-28 15:10:28,670 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$2.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueUserAclsInfoRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationSubmissionResponsePBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationSubmissionRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusesRequestPBImpl.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesResponsePBImpl$1$1.class 2015-04-28 15:10:28,671 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesRequestPBImpl$1$1.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl$3.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesResponsePBImpl.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/CancelDelegationTokenResponsePBImpl.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesRequestPBImpl.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptsResponsePBImpl.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$6$1.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationDeleteRequestPBImpl.class 2015-04-28 15:10:28,672 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueInfoRequestPBImpl.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainersResponsePBImpl$1$1.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusesResponsePBImpl.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainersResponsePBImpl.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/UseSharedCacheResourceResponsePBImpl.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StartContainersResponsePBImpl$2$1.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptsResponsePBImpl$1$1.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/MoveApplicationAcrossQueuesRequestPBImpl.class 2015-04-28 15:10:28,673 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainersRequestPBImpl.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueUserAclsInfoResponsePBImpl$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$5$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationUpdateResponsePBImpl.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$2.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationAttemptReportRequestPBImpl.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl$3$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl$3$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetQueueUserAclsInfoResponsePBImpl$1$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodeLabelsRequestPBImpl.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesRequestPBImpl$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsResponsePBImpl$1.class 2015-04-28 15:10:28,674 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainersResponsePBImpl$1$1.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainersResponsePBImpl.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsRequestPBImpl.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/protocolrecords/impl/pb/CancelDelegationTokenRequestPBImpl.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/ApplicationHistoryProtocolPB.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/ContainerManagementProtocolPB.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/package-info.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ApplicationClientProtocolPBServiceImpl.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagementProtocolPBServiceImpl.class 2015-04-28 15:10:28,675 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ApplicationMasterProtocolPBServiceImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ApplicationHistoryProtocolPBServiceImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/service/ClientSCMProtocolPBServiceImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/package-info.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ApplicationHistoryProtocolPBClientImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ApplicationMasterProtocolPBClientImpl.class 2015-04-28 15:10:28,676 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ApplicationClientProtocolPBClientImpl.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/impl/pb/client/ClientSCMProtocolPBClientImpl.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/ApplicationMasterProtocolPB.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl$2.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/package-info.class 2015-04-28 
15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/TokenPBImpl.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ProtoBase.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ResourceRequestPBImpl.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationRequestsPBImpl$1$1.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationRequestsPBImpl$1.class 2015-04-28 15:10:28,677 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$1$1.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContainerPBImpl.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$4$1.class 2015-04-28 15:10:28,678 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$4.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptReportPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/YarnClusterMetricsPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationRequestPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/NMTokenPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationRequestsPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionResourceRequestPBImpl.class 2015-04-28 15:10:28,679 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/LocalResourcePBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerResourceIncreaseRequestPBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueUserACLInfoPBImpl$1.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl$2$1.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/StrictPreemptionContractPBImpl$1$1.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl$1$1.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationIdPBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerReportPBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/NodeReportPBImpl.class 2015-04-28 15:10:28,680 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/SerializedExceptionPBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ResourcePBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl$2.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/LogAggregationContextPBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl$2$1.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueUserACLInfoPBImpl$1$1.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl$1$1.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$2.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$3$1.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ReservationDefinitionPBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl$1.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerResourceIncreasePBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.class 2015-04-28 15:10:28,681 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$1.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/URLPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ResourceBlacklistRequestPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$3.class 2015-04-28 
15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PreemptionMessagePBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl$2$1.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/StrictPreemptionContractPBImpl$1.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ApplicationResourceUsageReportPBImpl.class 2015-04-28 15:10:28,682 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueUserACLInfoPBImpl.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ProtoUtils.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/PriorityPBImpl.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/StrictPreemptionContractPBImpl.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl$1.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ResourceOptionPBImpl.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/api/records/impl/pb/ContainerResourceDecreasePBImpl.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/package-info.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogReader.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogValue.class 2015-04-28 15:10:28,683 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogWriter$1.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogKey.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogValue$1.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService$LogDeletionTask.class 2015-04-28 
15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$LogWriter.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat$ContainerLogsReader.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/package-info.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/ResponseInfo.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/package-info.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock$1.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock$LogLimits.class 2015-04-28 15:10:28,684 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Object.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletImpl$Generic.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$IMG.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/package-info.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$INS.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DD.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Li.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$COLGROUP.class 2015-04-28 15:10:28,685 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$ACRONYM.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TBODY.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H3.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TBODY.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$CAPTION.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Dl.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$InputType.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DFN.class 2015-04-28 15:10:28,686 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TD.class 2015-04-28 15:10:28,687 DEBUG [main] org.mortbay.log: Skipping 
entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$PRE.class 2015-04-28 15:10:28,687 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H2.class 2015-04-28 15:10:28,687 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$ADDRESS.class 2015-04-28 15:10:28,687 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SUP.class 2015-04-28 15:10:28,687 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$CODE.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$EM.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$HR.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Cell.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_ImgObject.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H4.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$THEAD.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TFOOT.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SPAN.class 2015-04-28 15:10:28,688 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Heading.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Scope.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$B.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$META.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_FormCtrl.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DIV.class 2015-04-28 15:10:28,689 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$PRE.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Media.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$EventsAttrs.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Inline.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BLOCKQUOTE.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TEXTAREA.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H6.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H5.class 2015-04-28 15:10:28,690 DEBUG [main] 
org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_TableRow.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H5.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Block.class 2015-04-28 15:10:28,690 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$ABBR.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_FieldSet.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H6.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DD.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$HTML.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H4.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Tr.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BR.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$ABBR.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$KBD.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$UL.class 2015-04-28 15:10:28,691 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$HEAD.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$OPTION.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$LABEL.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$H1.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$UL.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$LEGEND.class 2015-04-28 15:10:28,692 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Listing.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TR.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TABLE.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Attrs.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Script.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BR.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BASE.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$MAP.class 2015-04-28 
15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$CoreAttrs.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Child.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$PARAM.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$P.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$OL.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SMALL.class 2015-04-28 15:10:28,693 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$LI.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BDO.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$FontStyle.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$FORM.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletImpl.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$FIELDSET.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DL.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_FontSize.class 2015-04-28 15:10:28,694 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$VAR.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Table.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BUTTON.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$OPTGROUP.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_RawContent.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$EM.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Flow.class 2015-04-28 15:10:28,695 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SMALL.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$FIELDSET.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DT.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$INS.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SUP.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Legend.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DL.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BASE.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$CITE.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$FORM.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Form.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Block.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$VAR.class 2015-04-28 15:10:28,696 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$CAPTION.class 2015-04-28 15:10:28,697 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_FontStyle.class 2015-04-28 15:10:28,697 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SAMP.class 2015-04-28 15:10:28,697 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TH.class 2015-04-28 15:10:28,697 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Body.class 2015-04-28 15:10:28,697 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$STRONG.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$I18nAttrs.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Phrase.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$ButtonType.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Content.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$HEAD.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$STYLE.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BODY.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$STRONG.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$ADDRESS.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$COLGROUP.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SCRIPT.class 2015-04-28 15:10:28,698 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TITLE.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TH.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$META.class 2015-04-28 
15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$OBJECT.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DIV.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$B.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H3.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H1.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Html.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Anchor.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Q.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DFN.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TR.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_InsDel.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$MAP.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$STYLE.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SELECT.class 2015-04-28 15:10:28,699 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DEL.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SCRIPT.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$H2.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$OL.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$LinkType.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$A.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$LABEL.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Preformatted.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TABLE.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_SubSup.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$THEAD.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$INPUT.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$HeadMisc.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/webapp/hamlet/Hamlet$PARAM.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Element.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$OPTGROUP.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$AREA.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Dir.class 2015-04-28 15:10:28,700 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$INPUT.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Shape.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$COL.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Param.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$CODE.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$I.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BDO.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TFOOT.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$BUTTON.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$LINK.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TD.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SAMP.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$COL.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Label.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$LI.class 2015-04-28 15:10:28,701 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$KBD.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletGen.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$DEL.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$TEXTAREA.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$I.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$P.class 2015-04-28 15:10:28,702 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$PCData.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: 
Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$CITE.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$DT.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$ACRONYM.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BODY.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$Q.class 2015-04-28 15:10:28,703 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Special.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SPAN.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$LINK.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$A.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_TableCol.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Special.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$AREA.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$OBJECT.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$LEGEND.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Head.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$IMG.class 2015-04-28 15:10:28,704 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$_Option.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$OPTION.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SELECT.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$TITLE.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$Method.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletImpl$EOpt.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$FormCtrl.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$HTML.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$BLOCKQUOTE.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletSpec$SUB.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/HamletImpl$EImp.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$HR.class 2015-04-28 
15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/hamlet/Hamlet$SUB.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/package-info.class 2015-04-28 15:10:28,705 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HtmlPage.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/TwoColumnCssLayout.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/DefaultPage.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/ErrorPage.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HtmlBlock.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/Html.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/TextView.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HtmlPage$_.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HtmlPage$Page.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/LipsumBlock.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/JQueryUI.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/TextPage.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/NavBlock.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/InfoBlock.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/FooterBlock.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HtmlBlock$Block.class 2015-04-28 15:10:28,706 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/HeaderBlock.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/view/TwoColumnLayout.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/package-info.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/HelloWorld$Hello.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/HelloWorld$HelloView.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/MyApp$MyController.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/MyApp$MyView.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/HelloWorld.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/example/MyApp.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/webapp/Router$Dest.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Dispatcher.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Controller.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApps$Builder.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/YarnWebParams.class 2015-04-28 15:10:28,707 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApps$Builder$1.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Params.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Router.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/ForbiddenException.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/MimeType.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApps.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/util/WebAppUtils.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/View.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Dispatcher$1.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/View$ViewContext.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/Controller$RequestContext.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/ResponseInfo$Item.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/SubView.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApps$Builder$ServletStruct.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApp.class 2015-04-28 15:10:28,708 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/GenericExceptionHandler.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/NotFoundException.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApp$HTTP.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/DefaultWrapperServlet$1.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebApps$Builder$2.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/RemoteExceptionData.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/BadRequestException.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/webapp/WebAppException.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: 
org/apache/hadoop/yarn/webapp/ToJSON.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/package-info.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/Event.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AsyncDispatcher$2.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/EventHandler.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AsyncDispatcher$1.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AsyncDispatcher$MultiListenerHandler.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/Dispatcher.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AbstractEvent.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AsyncDispatcher.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/event/AsyncDispatcher$GenericEventHandler.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factory/providers/package-info.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.class 2015-04-28 15:10:28,709 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/sharedcache/SharedCacheChecksumFactory.class 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/sharedcache/SharedCacheChecksum.class 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: org/apache/hadoop/yarn/sharedcache/ChecksumSHA256Impl.class 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: yarn-default.xml 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/maven/ 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/maven/org.apache.hadoop/ 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/maven/org.apache.hadoop/hadoop-yarn-common/ 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/maven/org.apache.hadoop/hadoop-yarn-common/pom.xml 2015-04-28 15:10:28,710 DEBUG [main] org.mortbay.log: Skipping entry: META-INF/maven/org.apache.hadoop/hadoop-yarn-common/pom.properties 2015-04-28 15:10:28,711 DEBUG [main] org.mortbay.log: Checking Resource aliases 2015-04-28 15:10:28,711 DEBUG [main] org.mortbay.log: webapp=file:/tmp/Jetty_0_0_0_0_13644_mapreduce____.xeeumz/webapp/ 2015-04-28 15:10:28,725 DEBUG [main] org.mortbay.log: Configuring web-jetty.xml 2015-04-28 15:10:28,725 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/hadoop-nfs-2.7.0.jar 2015-04-28 15:10:28,727 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/hadoop-common-2.7.0-tests.jar 2015-04-28 15:10:28,732 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/hadoop-common-2.7.0.jar 2015-04-28 
15:10:28,737 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar 2015-04-28 15:10:28,737 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-codec-1.4.jar 2015-04-28 15:10:28,737 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar 2015-04-28 15:10:28,738 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/stax-api-1.0-2.jar 2015-04-28 15:10:28,738 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-beanutils-1.7.0.jar 2015-04-28 15:10:28,738 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/curator-framework-2.7.1.jar 2015-04-28 15:10:28,739 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/xz-1.0.jar 2015-04-28 15:10:28,739 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/log4j-1.2.17.jar 2015-04-28 15:10:28,740 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/gson-2.2.4.jar 2015-04-28 15:10:28,740 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/paranamer-2.3.jar 2015-04-28 15:10:28,740 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar 2015-04-28 15:10:28,741 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/servlet-api-2.5.jar 2015-04-28 15:10:28,742 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/xmlenc-0.52.jar 2015-04-28 15:10:28,742 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar 2015-04-28 15:10:28,742 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/curator-recipes-2.7.1.jar 2015-04-28 15:10:28,743 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-collections-3.2.1.jar 2015-04-28 15:10:28,744 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar 2015-04-28 15:10:28,744 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-net-3.1.jar 2015-04-28 15:10:28,745 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/netty-3.6.2.Final.jar 2015-04-28 15:10:28,747 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/hadoop-annotations-2.7.0.jar 2015-04-28 15:10:28,748 DEBUG [main] org.mortbay.log: TLD search of 
file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/hamcrest-core-1.3.jar 2015-04-28 15:10:28,748 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-compress-1.4.1.jar 2015-04-28 15:10:28,748 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/httpcore-4.2.5.jar 2015-04-28 15:10:28,749 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-configuration-1.6.jar 2015-04-28 15:10:28,749 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar 2015-04-28 15:10:28,749 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar 2015-04-28 15:10:28,750 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jackson-xc-1.9.13.jar 2015-04-28 15:10:28,750 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jersey-json-1.9.jar 2015-04-28 15:10:28,750 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/protobuf-java-2.5.0.jar 2015-04-28 15:10:28,750 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/httpclient-4.2.5.jar 2015-04-28 15:10:28,751 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar 2015-04-28 15:10:28,752 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-math3-3.1.1.jar 2015-04-28 15:10:28,754 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jersey-core-1.9.jar 2015-04-28 15:10:28,754 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/asm-3.2.jar 2015-04-28 15:10:28,754 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/junit-4.11.jar 2015-04-28 15:10:28,755 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar 2015-04-28 15:10:28,755 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jets3t-0.9.0.jar 2015-04-28 15:10:28,756 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-digester-1.8.jar 2015-04-28 15:10:28,756 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/slf4j-api-1.7.10.jar 2015-04-28 15:10:28,756 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/api-util-1.0.0-M20.jar 2015-04-28 15:10:28,756 DEBUG [main] org.mortbay.log: TLD search of 
file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/zookeeper-3.4.6.jar 2015-04-28 15:10:28,757 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/mockito-all-1.8.5.jar 2015-04-28 15:10:28,759 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-lang-2.6.jar 2015-04-28 15:10:28,760 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/avro-1.7.4.jar 2015-04-28 15:10:28,760 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/hadoop-auth-2.7.0.jar 2015-04-28 15:10:28,760 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jetty-6.1.26.jar 2015-04-28 15:10:28,761 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jaxb-api-2.2.2.jar 2015-04-28 15:10:28,761 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jettison-1.1.jar 2015-04-28 15:10:28,761 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-httpclient-3.1.jar 2015-04-28 15:10:28,761 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-io-2.4.jar 2015-04-28 15:10:28,762 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar 2015-04-28 15:10:28,763 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jersey-server-1.9.jar 2015-04-28 15:10:28,763 DEBUG [main] org.mortbay.log: TLD found jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jersey-server-1.9.jar!/META-INF/taglib.tld 2015-04-28 15:10:28,764 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/snappy-java-1.0.4.1.jar 2015-04-28 15:10:28,764 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/guava-11.0.2.jar 2015-04-28 15:10:28,766 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jetty-util-6.1.26.jar 2015-04-28 15:10:28,767 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-cli-1.2.jar 2015-04-28 15:10:28,767 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/curator-client-2.7.1.jar 2015-04-28 15:10:28,767 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsch-0.1.42.jar 2015-04-28 15:10:28,768 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar 2015-04-28 15:10:28,769 DEBUG [main] org.mortbay.log: TLD search of 
file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/activation-1.1.jar 2015-04-28 15:10:28,769 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/commons-logging-1.1.3.jar 2015-04-28 15:10:28,769 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/java-xmlbuilder-0.4.jar 2015-04-28 15:10:28,769 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsr305-3.0.0.jar 2015-04-28 15:10:28,769 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/hadoop-hdfs-2.7.0.jar 2015-04-28 15:10:28,774 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/hadoop-hdfs-2.7.0-tests.jar 2015-04-28 15:10:28,776 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.0.jar 2015-04-28 15:10:28,776 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar 2015-04-28 15:10:28,778 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar 2015-04-28 15:10:28,778 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar 2015-04-28 15:10:28,779 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/lib/netty-all-4.0.23.Final.jar 2015-04-28 15:10:28,780 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar 2015-04-28 15:10:28,780 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-tests-2.7.0.jar 2015-04-28 15:10:28,781 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.7.0.jar 2015-04-28 15:10:28,782 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.7.0.jar 2015-04-28 15:10:28,782 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.7.0.jar 2015-04-28 15:10:28,782 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar 2015-04-28 15:10:28,784 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.7.0.jar 2015-04-28 15:10:28,784 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-registry-2.7.0.jar 2015-04-28 15:10:28,784 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.7.0.jar 2015-04-28 15:10:28,784 DEBUG [main] org.mortbay.log: TLD search of 
file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-client-2.7.0.jar 2015-04-28 15:10:28,784 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-api-2.7.0.jar 2015-04-28 15:10:28,786 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-common-2.7.0.jar 2015-04-28 15:10:28,786 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.7.0.jar 2015-04-28 15:10:28,786 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-2.7.0.jar 2015-04-28 15:10:28,786 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/javax.inject-1.jar 2015-04-28 15:10:28,787 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/zookeeper-3.4.6-tests.jar 2015-04-28 15:10:28,787 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/jersey-client-1.9.jar 2015-04-28 15:10:28,787 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/jersey-guice-1.9.jar 2015-04-28 15:10:28,787 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/guice-3.0.jar 2015-04-28 15:10:28,788 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/guice-servlet-3.0.jar 2015-04-28 15:10:28,789 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/lib/aopalliance-1.0.jar 2015-04-28 15:10:28,789 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.0.jar 2015-04-28 15:10:28,789 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.7.0.jar 2015-04-28 15:10:28,790 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.0-tests.jar 2015-04-28 15:10:28,791 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.7.0.jar 2015-04-28 15:10:28,791 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.7.0.jar 2015-04-28 15:10:28,792 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.0.jar 2015-04-28 15:10:28,792 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.7.0.jar 2015-04-28 15:10:28,792 DEBUG [main] org.mortbay.log: TLD search of 
file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.7.0.jar 2015-04-28 15:10:28,792 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.0.jar 2015-04-28 15:10:28,794 DEBUG [main] org.mortbay.log: TLD search of file:/opt/bibin/dsperf/HADOP2_7/nmlocal/usercache/dsperf/appcache/application_1430213948957_0001/filecache/11/job.jar/job.jar 2015-04-28 15:10:28,794 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/sunec.jar 2015-04-28 15:10:28,794 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/dnsns.jar 2015-04-28 15:10:28,794 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/jfxrt.jar 2015-04-28 15:10:28,805 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/localedata.jar 2015-04-28 15:10:28,805 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/nashorn.jar 2015-04-28 15:10:28,807 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/zipfs.jar 2015-04-28 15:10:28,807 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/sunjce_provider.jar 2015-04-28 15:10:28,807 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/sunpkcs11.jar 2015-04-28 15:10:28,808 DEBUG [main] org.mortbay.log: TLD search of file:/opt/dsperf/jdk1.8.0_40/jre/lib/ext/cldrdata.jar 2015-04-28 15:10:28,810 DEBUG [main] org.mortbay.log: loaded class org.apache.xerces.jaxp.SAXParserFactoryImpl from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,811 DEBUG [main] org.mortbay.log: loaded class org.apache.xerces.parsers.XIncludeAwareParserConfiguration from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,812 DEBUG [main] org.mortbay.log: loaded class org.apache.xerces.impl.dv.dtd.DTDDVFactoryImpl from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,812 DEBUG [main] org.mortbay.log: getResource(javax/servlet/jsp/resources/web-jsptaglibrary_1_1.dtd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_1_1.dtd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: getResource(javax/servlet/jsp/resources/web-jsptaglibrary_1_2.dtd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_1_2.dtd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: getResource(javax/servlet/jsp/resources/web-jsptaglibrary_2_0.xsd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_2_0.xsd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: getResource(javax/servlet/jsp/resources/web-jsptaglibrary_1_1.dtd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_1_1.dtd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: getResource(javax/servlet/jsp/resources/web-jsptaglibrary_1_2.dtd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_1_2.dtd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: 
getResource(javax/servlet/jsp/resources/web-jsptaglibrary_2_0.xsd)=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_2_0.xsd 2015-04-28 15:10:28,813 DEBUG [main] org.mortbay.log: TLD=jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jersey-server-1.9.jar!/META-INF/taglib.tld 2015-04-28 15:10:28,821 DEBUG [main] org.mortbay.log: resolveEntity(-//Sun Microsystems, Inc.//DTD JSP Tag Library 1.2//EN, http://java.sun.com/dtd/web-jsptaglibrary_1_2.dtd) 2015-04-28 15:10:28,821 DEBUG [main] org.mortbay.log: Can't exact match entity in redirect map, trying web-jsptaglibrary_1_2.dtd 2015-04-28 15:10:28,822 DEBUG [main] org.mortbay.log: Redirected entity http://java.sun.com/dtd/web-jsptaglibrary_1_2.dtd --> jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/common/lib/jsp-api-2.1.jar!/javax/servlet/jsp/resources/web-jsptaglibrary_1_2.dtd 2015-04-28 15:10:28,828 DEBUG [main] org.mortbay.log: Container Server@4e2c95ee + org.mortbay.jetty.servlet.HashSessionIdManager@5d1e09bc as sessionIdManager 2015-04-28 15:10:28,828 DEBUG [main] org.mortbay.log: Init SecureRandom. 2015-04-28 15:10:28,828 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.HashSessionIdManager@5d1e09bc 2015-04-28 15:10:28,828 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.HashSessionManager@2375b321 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER, NoCacheFilter=NoCacheFilter, guice=guice} 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: pathFilters=[(F=NoCacheFilter,[/*],[],15), (F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[*.html, *.jsp],[],15), (F=AM_PROXY_FILTER,[/stacks],[],15), (F=AM_PROXY_FILTER,[/logLevel],[],15), (F=AM_PROXY_FILTER,[/metrics],[],15), (F=AM_PROXY_FILTER,[/jmx],[],15), (F=AM_PROXY_FILTER,[/conf],[],15), (F=AM_PROXY_FILTER,[/mapreduce/*],[],15), (F=AM_PROXY_FILTER,[/ws/*],[],15), (F=guice,[/*],[],15)] 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: servletPathMap={/jmx=jmx, /conf=conf, /stacks=stacks, /logLevel=logLevel, /metrics=metrics, /=org.mortbay.jetty.servlet.DefaultServlet-1596009860} 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: servletNameMap={logLevel=logLevel, jmx=jmx, stacks=stacks, conf=conf, org.mortbay.jetty.servlet.DefaultServlet-1596009860=org.mortbay.jetty.servlet.DefaultServlet-1596009860, metrics=metrics} 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: starting ServletHandler@6e92c6ad 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: started ServletHandler@6e92c6ad 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: starting SecurityHandler@2fb5fe30 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: started SecurityHandler@2fb5fe30 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: starting SessionHandler@456be73c 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: started SessionHandler@456be73c 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: starting org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: starting ErrorPageErrorHandler@5baaae4c 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: started ErrorPageErrorHandler@5baaae4c 2015-04-28 
15:10:28,829 DEBUG [main] org.mortbay.log: loaded class org.apache.hadoop.http.NoCacheFilter from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,829 DEBUG [main] org.mortbay.log: Holding class org.apache.hadoop.http.NoCacheFilter 2015-04-28 15:10:28,830 DEBUG [main] org.mortbay.log: started NoCacheFilter 2015-04-28 15:10:28,830 DEBUG [main] org.mortbay.log: loaded class org.apache.hadoop.http.HttpServer2$QuotingInputFilter from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,830 DEBUG [main] org.mortbay.log: Holding class org.apache.hadoop.http.HttpServer2$QuotingInputFilter 2015-04-28 15:10:28,831 DEBUG [main] org.mortbay.log: started safety 2015-04-28 15:10:28,831 DEBUG [main] org.mortbay.log: loaded class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,831 DEBUG [main] org.mortbay.log: Holding class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter 2015-04-28 15:10:28,832 DEBUG [main] org.mortbay.log: started AM_PROXY_FILTER 2015-04-28 15:10:28,832 DEBUG [main] org.mortbay.log: loaded class com.google.inject.servlet.GuiceFilter from sun.misc.Launcher$AppClassLoader@28c97a5 2015-04-28 15:10:28,832 DEBUG [main] org.mortbay.log: Holding class com.google.inject.servlet.GuiceFilter 2015-04-28 15:10:28,833 DEBUG [main] org.mortbay.log: started guice 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started conf 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started stacks 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started jmx 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started logLevel 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started metrics 2015-04-28 15:10:28,834 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.DefaultServlet$NIOResourceCache@37095ded 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: resource base = file:/tmp/Jetty_0_0_0_0_13644_mapreduce____.xeeumz/webapp/ 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.DefaultServlet-1596009860 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.webapp.WebAppContext@6dc1484{/,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/mapreduce} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: Container org.mortbay.jetty.servlet.Context@5b6e8f77{/static,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/static} + ErrorHandler@2eb79cbe as errorHandler 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: filterNameMap={safety=safety, AM_PROXY_FILTER=AM_PROXY_FILTER} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: pathFilters=[(F=safety,[/*],[],15), (F=AM_PROXY_FILTER,[/*],[],15)] 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: servletFilterMap=null 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: servletPathMap={/*=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: servletNameMap={org.mortbay.jetty.servlet.DefaultServlet-809822663=org.mortbay.jetty.servlet.DefaultServlet-809822663} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: starting ServletHandler@41a6d121 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started ServletHandler@41a6d121 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: starting 
org.mortbay.jetty.servlet.Context@5b6e8f77{/static,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/static} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: starting ErrorHandler@2eb79cbe 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started ErrorHandler@2eb79cbe 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: Holding class org.mortbay.jetty.servlet.DefaultServlet 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.DefaultServlet-809822663 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.servlet.Context@5b6e8f77{/static,jar:file:/opt/bibin/dsperf/HADOP2_7/install/hadoop/nodemanager/share/hadoop/yarn/hadoop-yarn-common-2.7.0.jar!/webapps/static} 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: starting ContextHandlerCollection@7ff2b8d2 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: started ContextHandlerCollection@7ff2b8d2 2015-04-28 15:10:28,835 DEBUG [main] org.mortbay.log: starting Server@4e2c95ee 2015-04-28 15:10:28,838 DEBUG [main] org.mortbay.log: started org.mortbay.jetty.nio.SelectChannelConnector$1@56da52a7 2015-04-28 15:10:28,839 INFO [main] org.mortbay.log: Started HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:13644 2015-04-28 15:10:28,839 DEBUG [main] org.mortbay.log: started HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:13644 2015-04-28 15:10:28,839 DEBUG [main] org.mortbay.log: started Server@4e2c95ee 2015-04-28 15:10:28,839 INFO [main] org.apache.hadoop.yarn.webapp.WebApps: Web app /mapreduce started at 13644 2015-04-28 15:10:28,944 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /([])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#index 2015-04-28 15:10:28,946 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,952 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,952 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /app([])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#index 2015-04-28 15:10:28,952 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,952 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,952 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /job([:job.id])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#job 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /conf([:job.id])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#conf 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /jobcounters([:job.id])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#jobCounters 2015-04-28 15:10:28,953 DEBUG 
[main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,953 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /singlejobcounter([:job.id, :counter.group, :counter.name])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleJobCounter 2015-04-28 15:10:28,954 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,954 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,954 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /tasks([:job.id, :task.type, :task.state])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#tasks 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /attempts([:job.id, :task.type, :attempt.state])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#attempts 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /task([:task.id])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#task 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /taskcounters([:task.id])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#taskCounters 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: adding /singletaskcounter([:task.id, :counter.group, :counter.name])->class org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleTaskCounter 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: trying: org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:28,955 DEBUG [main] org.apache.hadoop.yarn.webapp.Router: found org.apache.hadoop.mapreduce.v2.app.webapp.AppView 2015-04-28 15:10:29,144 INFO [main] org.apache.hadoop.yarn.webapp.WebApps: Registered webapp guice modules 2015-04-28 15:10:29,146 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.client.MRClientService is started 2015-04-28 15:10:29,146 DEBUG [main] org.apache.hadoop.service.CompositeService: org.apache.hadoop.mapreduce.v2.app.MRAppMaster: starting services, size=7 2015-04-28 15:10:29,147 DEBUG [main] org.apache.hadoop.service.AbstractService: Service Dispatcher is started 2015-04-28 
15:10:29,148 DEBUG [main] org.apache.hadoop.service.AbstractService: Service CommitterEventHandler is started 2015-04-28 15:10:29,148 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: AM_STARTED 2015-04-28 15:10:29,148 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: JOB_SUBMITTED 2015-04-28 15:10:29,148 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: JOB_CREATE 2015-04-28 15:10:29,150 DEBUG [main] org.apache.hadoop.ipc.Server: rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=org.apache.hadoop.ipc.WritableRpcEngine$Server$WritableRpcInvoker@4038cd3a 2015-04-28 15:10:29,150 INFO [main] org.apache.hadoop.ipc.CallQueueManager: Using callQueue class java.util.concurrent.LinkedBlockingQueue 2015-04-28 15:10:29,150 DEBUG [main] org.apache.hadoop.ipc.Server: TOKEN authentication enabled for secret manager 2015-04-28 15:10:29,150 DEBUG [main] org.apache.hadoop.ipc.Server: Server accepts auth methods:[TOKEN, SIMPLE] 2015-04-28 15:10:29,151 INFO [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Starting Socket Reader #1 for port 21207 2015-04-28 15:10:29,151 DEBUG [main] org.apache.hadoop.ipc.metrics.RpcMetrics: Initialized MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=21207}], metrics=[]} 2015-04-28 15:10:29,151 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of received bytes]) 2015-04-28 15:10:29,151 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of sent bytes]) 2015-04-28 15:10:29,151 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Queue time]) 2015-04-28 15:10:29,151 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Processsing time]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authentication 
failures]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authentication successes]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authorization failures]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of authorization sucesses]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Number of open connections]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[Length of the call queue]) 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: RpcActivityForPort21207, Aggregate RPC metrics 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:29,152 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=15 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 
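The MutableMetricsFactory records above show Hadoop's metrics2 system reflecting over the @Metric-annotated fields of RpcMetrics and turning them into a metrics source. A minimal sketch of that pattern follows, assuming hadoop-common on the classpath; the class and field names (ExampleMetrics, requestsReceived, requestsInFlight) are invented for illustration and are not part of this job's code.

    import org.apache.hadoop.metrics2.MetricsSystem;
    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableGaugeInt;

    // Illustrative metrics source; the annotation pattern is what
    // MutableMetricsFactory is resolving in the log records above.
    @Metrics(about = "Example metrics", context = "example")
    public class ExampleMetrics {

      // Each @Metric field becomes a read-only attribute on the MBean that
      // MetricsSourceAdapter later registers (e.g. a Long counter).
      @Metric("Number of requests received")
      MutableCounterLong requestsReceived;

      @Metric("Requests currently in flight")
      MutableGaugeInt requestsInFlight;

      public static ExampleMetrics create() {
        DefaultMetricsSystem.initialize("Example");
        MetricsSystem ms = DefaultMetricsSystem.instance();
        // register() is what triggers the MutableMetricsFactory discovery
        // and the "Registered source ..." lines seen in this log.
        return ms.register("ExampleMetrics", "Example metrics", new ExampleMetrics());
      }

      public void onRequest() { requestsReceived.incr(); }
    }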
2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=RPC port, name=tag.port, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of received bytes, name=ReceivedBytes, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of sent bytes, name=SentBytes, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for queue time, name=RpcQueueTimeNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for queue time, name=RpcQueueTimeAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of ops for processsing time, name=RpcProcessingTimeNumOps, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Average time for processsing time, name=RpcProcessingTimeAvgTime, type=java.lang.Double, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authentication failures, name=RpcAuthenticationFailures, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authentication successes, name=RpcAuthenticationSuccesses, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authorization failures, name=RpcAuthorizationFailures, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of authorization sucesses, name=RpcAuthorizationSuccesses, type=java.lang.Long, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Number of open connections, name=NumOpenConnections, type=java.lang.Integer, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Length of the call queue, name=CallQueueLength, type=java.lang.Integer, read-only, descriptor={}]] 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=RpcActivityForPort21207 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source RpcActivityForPort21207 registered. 
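Once MetricsSourceAdapter has registered the MBean Hadoop:service=MRAppMaster,name=RpcActivityForPort21207 (previous record), its attributes (ReceivedBytes, RpcQueueTimeAvgTime, CallQueueLength, ...) can be read while the AM is alive, for example through the /jmx servlet mapped earlier on the AM web server at port 13644. A rough sketch is below; the host name is hypothetical, and since AM_PROXY_FILTER covers /jmx, a direct request may be redirected through the ResourceManager web proxy on a real cluster.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class JmxProbe {
      public static void main(String[] args) throws Exception {
        // Port 13644 and the bean name are taken from this log; the host is made up.
        URL url = new URL("http://am-host.example.com:13644/jmx"
            + "?qry=Hadoop:service=MRAppMaster,name=RpcActivityForPort21207");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);   // JSON with ReceivedBytes, SentBytes, ...
          }
        } finally {
          conn.disconnect();
        }
      }
    }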
2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source RpcActivityForPort21207 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.ipc.metrics.RpcDetailedMetrics: MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed} 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRates org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(about=, sampleName=Ops, always=false, type=DEFAULT, valueName=Time, value=[]) 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: RpcDetailedActivityForPort21207, Per method RPC metrics 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: source.source.start_mbeans 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'MetricsConfig' for key: source.start_mbeans 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsConfig: poking parent 'PropertiesConfiguration' for key: *.source.start_mbeans 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating attr cache... 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done. # tags & metrics=3 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Updating info cache... 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: [javax.management.MBeanAttributeInfo[description=RPC port, name=tag.port, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Metrics context, name=tag.Context, type=java.lang.String, read-only, descriptor={}], javax.management.MBeanAttributeInfo[description=Local hostname, name=tag.Hostname, type=java.lang.String, read-only, descriptor={}]] 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: Done 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.util.MBeans: Registered Hadoop:service=MRAppMaster,name=RpcDetailedActivityForPort21207 2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSourceAdapter: MBean for source RpcDetailedActivityForPort21207 registered. 
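The "Dispatching the event ... EventType: AM_STARTED / JOB_SUBMITTED / JOB_CREATE" records earlier, and the long run of JOB_START, T_SCHEDULE and TA_SCHEDULE dispatches later in this log, all flow through YARN's AsyncDispatcher: producers enqueue events, and the single "AsyncDispatcher event handler" thread drains the queue and invokes whichever handler was registered for that event-type enum. A minimal, self-contained sketch of the mechanism (the event type and handler here are invented for illustration, not the AM's real JobEventType/TaskEventType handlers):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.event.AbstractEvent;
    import org.apache.hadoop.yarn.event.AsyncDispatcher;
    import org.apache.hadoop.yarn.event.EventHandler;

    enum DemoEventType { DEMO_STARTED, DEMO_FINISHED }

    class DemoEvent extends AbstractEvent<DemoEventType> {
      DemoEvent(DemoEventType type) { super(type); }
    }

    public class DispatcherDemo {
      public static void main(String[] args) throws Exception {
        AsyncDispatcher dispatcher = new AsyncDispatcher();
        dispatcher.init(new Configuration());

        // One handler per event-type enum; the dispatcher thread calls
        // handle() for every event queued under that type.
        dispatcher.register(DemoEventType.class, new EventHandler<DemoEvent>() {
          @Override public void handle(DemoEvent e) {
            System.out.println("handled " + e.getType());
          }
        });

        dispatcher.start();
        dispatcher.getEventHandler().handle(new DemoEvent(DemoEventType.DEMO_STARTED));
        dispatcher.getEventHandler().handle(new DemoEvent(DemoEventType.DEMO_FINISHED));
        Thread.sleep(100);   // demo only: let the handler thread drain the queue
        dispatcher.stop();
      }
    }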
2015-04-28 15:10:29,153 DEBUG [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Registered source RpcDetailedActivityForPort21207 2015-04-28 15:10:29,154 DEBUG [main] org.apache.hadoop.ipc.Server: RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB 2015-04-28 15:10:29,156 DEBUG [main] org.apache.hadoop.ipc.Server: RpcKind = RPC_WRITABLE Protocol Name = org.apache.hadoop.mapred.TaskUmbilicalProtocol version=19 ProtocolImpl=org.apache.hadoop.mapred.TaskAttemptListenerImpl protocolClass=org.apache.hadoop.mapred.TaskUmbilicalProtocol 2015-04-28 15:10:29,157 INFO [IPC Server Responder] org.apache.hadoop.ipc.Server: IPC Server Responder: starting 2015-04-28 15:10:29,157 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: starting 2015-04-28 15:10:29,157 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: starting 2015-04-28 15:10:29,157 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: starting 2015-04-28 15:10:29,158 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: starting 2015-04-28 15:10:29,158 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: starting 2015-04-28 15:10:29,158 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: starting 2015-04-28 15:10:29,158 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: starting 2015-04-28 15:10:29,158 DEBUG [IPC Server handler 7 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 7 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 8 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 8 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 11 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 11 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 12 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 12 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 14 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 14 on 21207: starting 2015-04-28 15:10:29,159 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 19 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 19 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 18 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 18 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 20 
on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 21 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 21 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: starting 2015-04-28 15:10:29,160 DEBUG [IPC Server handler 23 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 23 on 21207: starting 2015-04-28 15:10:29,161 DEBUG [IPC Server handler 24 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 24 on 21207: starting 2015-04-28 15:10:29,161 DEBUG [IPC Server handler 26 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 26 on 21207: starting 2015-04-28 15:10:29,161 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: starting 2015-04-28 15:10:29,161 DEBUG [IPC Server handler 25 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 25 on 21207: starting 2015-04-28 15:10:29,164 INFO [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: IPC Server listener on 21207: starting 2015-04-28 15:10:29,164 DEBUG [IPC Server handler 28 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 28 on 21207: starting 2015-04-28 15:10:29,164 DEBUG [main] org.apache.hadoop.service.CompositeService: org.apache.hadoop.mapred.TaskAttemptListenerImpl: starting services, size=1 2015-04-28 15:10:29,165 DEBUG [main] org.apache.hadoop.service.AbstractService: Service TaskHeartbeatHandler is started 2015-04-28 15:10:29,166 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapred.TaskAttemptListenerImpl is started 2015-04-28 15:10:29,166 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService is started 2015-04-28 15:10:29,166 DEBUG [IPC Server handler 29 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 29 on 21207: starting 2015-04-28 15:10:29,181 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: RMCommunicator entered state INITED 2015-04-28 15:10:29,181 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: nodeBlacklistingEnabled:true 2015-04-28 15:10:29,181 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: maxTaskFailuresPerNode is 3 2015-04-28 15:10:29,181 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: blacklistDisablePercent is 33 2015-04-28 15:10:29,230 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.yarn.client.RMProxy.getProxy(RMProxy.java:136) 2015-04-28 15:10:29,230 DEBUG [main] org.apache.hadoop.yarn.ipc.YarnRPC: Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 2015-04-28 15:10:29,230 DEBUG [main] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ApplicationMasterProtocol 2015-04-28 15:10:29,238 DEBUG [main] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:29,263 DEBUG [main] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
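The "IPC Server handler N on 21207" records above are the handler pool of the TaskUmbilicalProtocol server that TaskAttemptListenerImpl has just brought up (RpcKind RPC_WRITABLE, version 19). The sketch below shows in rough outline how such a Writable-engine RPC server is assembled with RPC.Builder; the PingProtocol interface and its method are invented for illustration, and the handler count of 30 simply mirrors the thirty handler threads in the log.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.ProtocolSignature;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.ipc.VersionedProtocol;

    /** Toy Writable-engine protocol; name and method are illustrative only. */
    interface PingProtocol extends VersionedProtocol {
      long versionID = 1L;
      String ping(String msg) throws IOException;
    }

    public class PingServer implements PingProtocol {
      @Override public String ping(String msg) { return "pong:" + msg; }

      @Override public long getProtocolVersion(String protocol, long clientVersion) {
        return versionID;
      }
      @Override public ProtocolSignature getProtocolSignature(String protocol,
          long clientVersion, int clientMethodsHash) throws IOException {
        return ProtocolSignature.getProtocolSignature(
            this, protocol, clientVersion, clientMethodsHash);
      }

      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // setNumHandlers controls how many "IPC Server handler N on <port>"
        // threads are spawned, like the ones starting in the log above.
        RPC.Server server = new RPC.Builder(conf)
            .setProtocol(PingProtocol.class)
            .setInstance(new PingServer())
            .setBindAddress("0.0.0.0")
            .setPort(0)          // 0 = pick a free port, as the AM does
            .setNumHandlers(30)
            .build();
        server.start();
        System.out.println("listening on " + server.getListenerAddress());
      }
    }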
2015-04-28 15:10:29,263 DEBUG [main] org.apache.hadoop.ipc.Client: Connecting to /IP127:45017 2015-04-28 15:10:29,264 DEBUG [main] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:29,265 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:29,270 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"RahT2RWQLeRYfNp3PtAbGSqwROTydxaBaPrLH70Z\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:29,276 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ApplicationMasterProtocolPB info:org.apache.hadoop.yarn.security.SchedulerSecurityInfo$1@431f1eaf 2015-04-28 15:10:29,277 DEBUG [main] org.apache.hadoop.yarn.security.AMRMTokenSelector: Looking for a token with service IP127:45017 2015-04-28 15:10:29,277 DEBUG [main] org.apache.hadoop.yarn.security.AMRMTokenSelector: Token kind is YARN_AM_RM_TOKEN and the token's service name is IP127:45017,10.19.92.128:45017 2015-04-28 15:10:29,280 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:29,283 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ApplicationMasterProtocolPB 2015-04-28 15:10:29,284 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEKuZ8YX+/////wE= 2015-04-28 15:10:29,284 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:29,284 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:29,285 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEKuZ8YX+/////wE=\",realm=\"default\",nonce=\"RahT2RWQLeRYfNp3PtAbGSqwROTydxaBaPrLH70Z\",nc=00000001,cnonce=\"JreUuxjLfFdp8ryX/5FcEhFhp+eDxSV/2929oWI4\",digest-uri=\"/default\",maxbuf=65536,response=a827752935ba7486676ee9661f9c50ca,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:29,291 DEBUG [main] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=b049311f683c9e81381553c827e504f9" 2015-04-28 15:10:29,292 DEBUG [main] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:29,292 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf: starting, having connections 2 2015-04-28 15:10:29,292 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #13 2015-04-28 15:10:29,315 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #13 2015-04-28 15:10:29,316 DEBUG [main] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: registerApplicationMaster took 54ms 2015-04-28 15:10:29,330 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: 
maxContainerCapability: 2015-04-28 15:10:29,330 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: queue: root.default 2015-04-28 15:10:29,332 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: JOB_QUEUE_CHANGED 2015-04-28 15:10:29,333 DEBUG [main] org.apache.hadoop.service.AbstractService: Service RMCommunicator is started 2015-04-28 15:10:29,333 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter is started 2015-04-28 15:10:29,334 DEBUG [main] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl entered state INITED 2015-04-28 15:10:29,335 INFO [main] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Upper limit on the thread pool size is 500 2015-04-28 15:10:29,335 INFO [main] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: The thread pool initial size is 10 2015-04-28 15:10:29,336 INFO [main] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0 2015-04-28 15:10:29,336 DEBUG [main] org.apache.hadoop.yarn.ipc.YarnRPC: Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 2015-04-28 15:10:29,337 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl is started 2015-04-28 15:10:29,337 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter is started 2015-04-28 15:10:29,338 DEBUG [main] org.apache.hadoop.service.AbstractService: Service JobHistoryEventHandler is started 2015-04-28 15:10:29,338 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobStartEvent.EventType: JOB_START 2015-04-28 15:10:29,339 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_START 2015-04-28 15:10:29,341 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist: masked=rw-r--r-- 2015-04-28 15:10:29,341 DEBUG [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster is started 2015-04-28 15:10:29,344 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: job_1430213948957_0001Job Transitioned from INITED to SETUP 2015-04-28 15:10:29,344 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: JOB_INITED 2015-04-28 15:10:29,344 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: JOB_INFO_CHANGED 2015-04-28 15:10:29,344 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.commit.CommitterJobSetupEvent.EventType: JOB_SETUP 2015-04-28 15:10:29,346 INFO [CommitterEvent Processor #0] org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler: Processing the event EventType: JOB_SETUP 2015-04-28 
15:10:29,347 DEBUG [CommitterEvent Processor #0] org.apache.hadoop.hdfs.DFSClient: /user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/_temporary/1: masked=rwxr-xr-x 2015-04-28 15:10:29,348 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #15 2015-04-28 15:10:29,348 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #14 2015-04-28 15:10:29,365 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #15 2015-04-28 15:10:29,366 DEBUG [CommitterEvent Processor #0] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: mkdirs took 19ms 2015-04-28 15:10:29,367 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobSetupCompletedEvent.EventType: JOB_SETUP_COMPLETED 2015-04-28 15:10:29,367 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_SETUP_COMPLETED 2015-04-28 15:10:29,368 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: job_1430213948957_0001Job Transitioned from SETUP to RUNNING 2015-04-28 15:10:29,368 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,368 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000000 of type T_SCHEDULE 2015-04-28 15:10:29,390 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #14 2015-04-28 15:10:29,391 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 43ms 2015-04-28 15:10:29,393 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,394 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:29,394 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,395 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:29,397 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000000 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,397 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,397 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000001 of type T_SCHEDULE 2015-04-28 15:10:29,397 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 
15:10:29,397 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,397 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000001 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000002 of type T_SCHEDULE 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000002 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000003 of type T_SCHEDULE 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:29,400 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000003 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,400 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,400 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000004 of type T_SCHEDULE 2015-04-28 15:10:29,400 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,400 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,400 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:29,400 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000004 Task Transitioned from NEW to 
SCHEDULED 2015-04-28 15:10:29,400 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,401 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000005 of type T_SCHEDULE 2015-04-28 15:10:29,401 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,401 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,401 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000005 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000006 of type T_SCHEDULE 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000006 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000007 of type T_SCHEDULE 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,402 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,402 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:29,403 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000007 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,403 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,403 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000008 of type T_SCHEDULE 2015-04-28 15:10:29,404 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,404 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,404 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:29,404 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000008 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,404 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,404 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000009 of type T_SCHEDULE 2015-04-28 15:10:29,405 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,405 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,405 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:29,405 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000009 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,405 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,405 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000010 of type T_SCHEDULE 2015-04-28 15:10:29,405 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,405 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:29,406 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000010 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000011 of type T_SCHEDULE 2015-04-28 15:10:29,406 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,406 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000011 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,411 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,411 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000012 of type T_SCHEDULE 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,411 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000012 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,411 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,411 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000013 of type T_SCHEDULE 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,411 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,412 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:29,412 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000013 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,412 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,412 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000014 of type T_SCHEDULE 2015-04-28 15:10:29,412 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,413 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:29,413 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000014 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing 
task_1430213948957_0001_m_000015 of type T_SCHEDULE 2015-04-28 15:10:29,413 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:29,413 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:29,413 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000015 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent.EventType: T_SCHEDULE 2015-04-28 15:10:29,413 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_r_000000 of type T_SCHEDULE 2015-04-28 15:10:29,413 DEBUG [LeaseRenewer:dsperf@hacluster:8020] org.apache.hadoop.hdfs.LeaseRenewer: Lease renewer daemon for [DFSClient_NONMAPREDUCE_-704522727_1] with renew id 1 started 2015-04-28 15:10:29,416 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Created attempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:29,416 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_r_000000 Task Transitioned from NEW to SCHEDULED 2015-04-28 15:10:29,416 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,416 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_SCHEDULE 2015-04-28 15:10:29,417 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000000_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,417 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_SCHEDULE 2015-04-28 15:10:29,418 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000001_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_SCHEDULE 2015-04-28 15:10:29,418 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000002_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,418 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_SCHEDULE 2015-04-28 15:10:29,419 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000003_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,419 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,419 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,419 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_SCHEDULE 2015-04-28 15:10:29,419 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000004_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,419 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,419 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,420 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_SCHEDULE 2015-04-28 15:10:29,422 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000005_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_SCHEDULE 2015-04-28 15:10:29,422 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: 
attempt_1430213948957_0001_m_000006_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_SCHEDULE 2015-04-28 15:10:29,422 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000007_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_SCHEDULE 2015-04-28 15:10:29,422 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000008_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,422 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_SCHEDULE 2015-04-28 15:10:29,423 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000009_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_SCHEDULE 2015-04-28 15:10:29,423 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000010_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_SCHEDULE 2015-04-28 15:10:29,423 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000011_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_SCHEDULE 2015-04-28 15:10:29,423 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000012_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_SCHEDULE 2015-04-28 15:10:29,423 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000013_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,423 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_SCHEDULE 2015-04-28 15:10:29,424 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000014_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_SCHEDULE 2015-04-28 15:10:29,424 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000015_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_SCHEDULE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_SCHEDULE 2015-04-28 15:10:29,424 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from NEW to UNASSIGNED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_STARTED 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,424 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG 
[AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG 
[AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent.EventType: CONTAINER_REQ 2015-04-28 15:10:29,425 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: NORMALIZED_RESOURCE 2015-04-28 15:10:29,425 INFO [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: mapResourceRequest: 2015-04-28 15:10:29,426 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,426 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,426 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,427 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: Added priority=20 2015-04-28 15:10:29,433 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=1 #asks=1 2015-04-28 15:10:29,433 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=1 #asks=2 2015-04-28 15:10:29,433 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=1 #asks=3 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: 
addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=1 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=2 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=2 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=2 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=2 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=3 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=3 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=3 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=3 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=4 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=4 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=4 #asks=4 2015-04-28 15:10:29,434 DEBUG 
[Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=4 #asks=4 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,434 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=5 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=5 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=5 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=5 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=6 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=6 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=6 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=6 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=7 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=7 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 
resourceName=/default-rack numContainers=7 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=7 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=8 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=8 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=8 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=8 #asks=4 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,435 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=9 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=9 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=9 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=9 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=10 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=10 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] 
org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=10 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=10 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=11 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=11 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=11 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=11 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=12 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=12 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=12 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=12 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=13 #asks=4 2015-04-28 15:10:29,436 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 
resourceName=host-IP117 numContainers=13 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=13 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=13 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=14 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=14 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=14 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=14 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=15 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=15 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=15 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=15 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP143 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to host host-IP117 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Added attempt req to rack /default-rack 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=16 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] 
org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=16 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=16 #asks=4 2015-04-28 15:10:29,437 DEBUG [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=16 #asks=4 2015-04-28 15:10:29,437 INFO [Thread-50] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: reduceResourceRequest: 2015-04-28 15:10:29,438 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: NORMALIZED_RESOURCE 2015-04-28 15:10:29,463 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=0, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=0 2015-04-28 15:10:29,477 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Event Writer setup for JobId: job_1430213948957_0001, File: hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:29,477 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml: masked=rw-r--r-- 2015-04-28 15:10:29,478 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #16 2015-04-28 15:10:29,507 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #16 2015-04-28 15:10:29,507 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 30ms 2015-04-28 15:10:29,507 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:29,509 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for all properties in config... 
2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.address 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.rpc-address.hacluster.nn2 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.rpc-address.hacluster.nn1 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.resource.check.interval 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.client.thread-count 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.admin.acl 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.job.committer.cancel-timeout 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.emit-timeline-data 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.journalnode.rpc-address 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.leveldb-state-store.path 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapred.mapper.new-api 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.connection.maxidletime 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.process-kill-wait.ms 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.handler.count 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.map.index.interval 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.https-address 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile.reduces 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3n.multipart.uploads.enabled 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.seqfile.sorter.recordlimit 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.tasks.sleeptimebeforesigkill 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.util.hash.type 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.replication.min 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.jobhistory.block.size 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
dfs.namenode.fs-limits.min-block-size 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.AbstractFileSystem.file.impl 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for net.topology.script.number.args 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.output.compress.codec 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.windows-container.memory-limit.enabled 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.input.fileinputformat.split.minsize 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.system.dir 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.end-notification.max.attempts 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.speculative 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.localizer.cache.cleanup.interval-ms 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.threads.core 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.replication.interval 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.admin.address 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.maps 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.ubertask.enable 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.reduce.class 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.use.datanode.hostname 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.am.max-attempts 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-num-retries 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3.blocksize 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.data.dir 2015-04-28 15:10:29,510 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.parallelcopies 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3.buffer.dir 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.resource-tracker.address.rm1 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.resource-tracker.address.rm2 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.retry.ceiling.ms 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.data.dir.perm 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.env-whitelist 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.xattrs.enabled 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.cgroups.hierarchy 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.recovery.enabled 2015-04-28 15:10:29,510 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.container.log.backups 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.disk-health-checker.interval-ms 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.list.cache.directives.num.responses 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.admin.address.rm1 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.admin.address.rm2 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.max.total.tasks 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.maxattempts 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.port 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.resource-tracker.client.thread-count 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.replication.considerLoad 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.delete.thread-count 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.admin-env 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.proxy-user-privileges.enabled 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.speculative-cap-total-tasks 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.slowtaskthreshold 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ftp.replication 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.cleaner.initial-delay-mins 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3native.client-write-packet-size 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for file.bytes-per-checksum 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.slow.io.warning.threshold.ms 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.seqfile.lazydecompress 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.skip.start.attempts 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.reject-unresolved-dn-topology-mapping 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.admin.address 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.taskcache.levels 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.jvm.numtasks 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.top.num.users 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.cgroups.mount 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.checksum.algo.impl 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.classloader 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.log-aggregation-enable 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.fetch.retry.interval-ms 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.nodemanager.minimum.version 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.kms.client.encrypted.key.cache.size 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.output.fileoutputformat.compress.type 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.hdfs.configuration.version 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: 
Handling deprecation for yarn.nodemanager.log.retain-seconds 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.local-cache.max-files-per-directory 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.end-notification.retry.interval 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.failover-controller.new-active.rpc-timeout.ms 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.hostname.verifier 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.read.shortcircuit 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3native.blocksize 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.ha.resourcemanager.addresses 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.sleep.base.millis 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.permissions.superusergroup 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.retry.times 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.blockreport.initialDelay 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.maximum-allocation-mb 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.io.sort.factor 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.sleep.max.millis 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3.sleepTimeSeconds 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.health-monitor.rpc-timeout.ms 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.AbstractFileSystem.viewfs.impl 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.ftp.host 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.web-proxy.address 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.impl 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.fs-limits.max-blocks-per-file 2015-04-28 15:10:29,511 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.http.threads 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.compression.codec.bzip2.library 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.skip.maxrecords 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.loadedjobs.cache.size 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.storage.policy.enabled 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.client.output.filter 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.client.best-effort 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.write.packet.size 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.persist.jobstatus.hours 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.block-pinning.enabled 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.retry-after-no-speculate 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3native.stream-buffer-size 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.seqfile.local.dir 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.encrypt.data.transfer.cipher.key.bitlength 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.sync.behind.writes 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.log.level 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.application.attempt.id 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.stale.datanode.interval 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.io.sort.mb 2015-04-28 15:10:29,511 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-state-store.parent-path 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.client.resolve.remote.symlinks 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.enabled.protocols 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.cpu.vcores 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.failover-retries 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.address 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.enabled 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.name.dir 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.block.access.token.enable 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.retry-after-speculate 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.address 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.fair.allocation.file 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.connect.max.retries 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.short.circuit.shared.memory.watcher.interrupt.check.ms 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.handler.count 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.automatic-failover.embedded 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile.map.params 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.nodemanagers.heartbeat-interval-ms 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.map.class 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.resource.checked.volumes.minimum 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.keytab 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.max-cached-nodemanagers-proxies 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.trash.checkpoint.interval 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.app-checker.class 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.journalnode.http-address 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.staging-dir 2015-04-28 15:10:29,512 
DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nm.liveness-monitor.expiry-interval-ms 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.merge.percent 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.retrycache.heap.percent 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.connect.timeout 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.local-dirs 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.recovery.enabled 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zookeeper-store.path 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.outputformat.class 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.am.max-attempts 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3.replication 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.image.compress 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.available-space-volume-choosing-policy.balanced-space-preference-fraction 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.edit.log.autoroll.multiplier.threshold 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.ssl 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.check.period 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.defaultFS 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.search.attr.group.name 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.sort.spill.percent 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.crypto.codec.classes.aes.ctr.nopadding 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.http-address 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
hadoop.security.groups.negative-cache.secs 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.namenodes.hacluster 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.server.conf 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.nodemanager-client-async.thread-pool-max-size 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.staging.root.dir 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.admin.address 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.startup.delay.block.deletion.sec 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.health-checker.interval-ms 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.max-retries 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ftp.client-write-packet-size 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.keytab 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.fetch.retry.enabled 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.task.container.log.backups 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.heartbeat.interval 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.zookeeper.session-timeout.ms 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.signature.secret.file 2015-04-28 15:10:29,512 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.log-aggregation.compression-type 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.log-dirs 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.minimum-allowed-tasks 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.cache.revocation.timeout.ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.recovery.store.class 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
mapreduce.task.combine.progress.records 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.instrumentation 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.address.rm1 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.address.rm2 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.reduces 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.configuration.provider-class 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.kms.client.encrypted.key.cache.expiry 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.enabled 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for tfile.io.chunk.size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.session.timeout.ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.health-monitor.sleep-after-disconnect.ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.reduce.tasks.maximum 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.failover.proxy.provider 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.directoryscan.threads 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.directoryscan.interval 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.token.validity 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.failover-controller.graceful-fence.rpc-timeout.ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.local.dir.minspacekill 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.cleaner.interval-ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.datanode.registration.ip-hostname-check 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.http.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.backup.http-address 
2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.outofband.heartbeat 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.read.timeout 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.crypto.buffer.size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.skip.proc.count.autoincr 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.ifile.readahead.bytes 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.secure 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.safemode.min.datanodes 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.report.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.http-authentication.type 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.webhdfs.enabled 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.avoid.write.stale.datanode 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.log-aggregation.retain-seconds 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.complete.cancel.delegation.tokens 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.connection-keep-alive.timeout 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.minimum-allocation-vcores 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.client.retry-interval-ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.client.max-retries 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.max.threads 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.exports.allowed.hosts 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.mmap.cache.size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.file.buffer.size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for rpc.engine.org.apache.hadoop.ipc.ProtocolMetaInfoPB 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.txns 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.connect.retry.interval 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.journalnode.edits.dir 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.connect.timeout 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.fs.state-store.uri 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.connection.timeout.ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapred.queue.default.acl-administer-jobs 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.submithostaddress 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.cachereport.intervalMsec 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.container.log.limit.kb 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.resourcemanager.minimum.version 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for file.stream-buffer-size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.ubertask.maxreduces 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.idlethreshold 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ftp.stream-buffer-size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.client-server.address 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.connection.retries.on.timeouts 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.replication.work.multiplier.per.iteration 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.simple.anonymous.allowed 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.nodemanager-connect.retry-interval-ms 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.resources-handler.class 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-timeline-store.read-cache-size 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.authentication 2015-04-28 15:10:29,513 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.image.compression.codec 2015-04-28 15:10:29,513 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.files.preserve.failedtasks 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.read.shortcircuit.streams.cache.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for file.replication 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.joblist.cache.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.work-preserving-recovery.enabled 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.fs-limits.max-xattrs-per-inode 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.image.transfer.timeout 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.wtmax 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.multipart.purge 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.secondary.namenode.kerberos.internal.spnego.principal 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.connection.establish.timeout 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.rm-ids 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.stream-buffer-size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.invalidate.work.pct.per.iteration 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.multipart.purge.age 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.client.thread-count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for tfile.fs.input.buffer.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.type 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.fair.user-as-default-queue 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.list.encryption.zones.num.responses 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.cpu.vcores 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.decommission.interval 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
ftp.bytes-per-checksum 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.user.home.dir.prefix 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.pmem-check-enabled 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.inotify.max.events.per.rpc 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile.maps 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.ssl.file.buffer.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.webapp.https.address 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.command-opts 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.nm.uploader.replication.factor 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.root 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.failover-proxy-provider 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.remote-app-log-dir-suffix 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.principal 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.mountd.port 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.merge.inmem.threshold 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.generic-application-history.fs-history-store.uri 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.jobhistory.lru.cache.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.output.key.class 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.num.checkpoints.retained 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.queuename 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.max-age-ms 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.localizer.client.thread-count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.uploader.server.thread-count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.blockreport.split.threshold 2015-04-28 15:10:29,514 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.balance.bandwidthPerSec 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.block.scanner.volume.bytes.per.second 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.zookeeper.quorum 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.default.chunk.view.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.datestring.cache.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile.params 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.handler.count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.image.transfer.bandwidthPerSec 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.expire.trackers.interval 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.client.max-retries 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.handler-thread-count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.server.listen.queue.size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.threads.max 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.connect.max-wait.ms 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.max.split.locations 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.class 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.blocksize 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.connection-keep-alive.enable 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.threads.keepalivetime 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.failover-controller.cli-check.rpc-timeout.ms 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.zookeeper.acl 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.write.stale.datanode.ratio 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.encrypt.data.transfer 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.shared.file.descriptor.paths 2015-04-28 
15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.input.lineinputformat.linespermap 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.localizer.fetch.thread-count 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.address 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.read.shortcircuit.skip.checksum 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.ssl.enabled 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.log.level 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.rm.enabled 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size 2015-04-28 15:10:29,514 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.use.datanode.hostname 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.enabled 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapred.reducer.new-api 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.multipart.threshold 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.memory.limit.percent 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.https.server.keystore.resource 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.taskscheduler 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.kerberos.internal.spnego.principal 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.state-store.max-completed-applications 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.dns.interface 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for map.sort.class 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.buffer.dir 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.retry-delay.max.ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.client.progressmonitor.pollinterval 2015-04-28 15:10:29,515 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.shuffle.log.limit.kb 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.max.locked.memory 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.retrycache.expirytime.millis 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.scan.period.hours 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.block.write.replace-datanode-on-failure.best-effort 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.move.interval-ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.fencing.ssh.connect-timeout 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.fs-limits.max-component-length 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.fencing.methods 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.state-store-class 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.ipc.address 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.block.write.replace-datanode-on-failure.policy 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.path.based.cache.retry.interval.ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.crypto.cipher.suite 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.tail-edits.period 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.jaas.context 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.hostname 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.search.filter.group 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.search.filter.user 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.edits.dir 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.shared.edits.dir 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.failover-retries-on-socket-timeouts 2015-04-28 15:10:29,515 
DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.decommission.max.concurrent.tracked.nodes 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.recovery.store.leveldb.path 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.store.class 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.windows-container.cpu-limit.enabled 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.vmem-pmem-ratio 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.period 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.automatic-failover.enabled 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.monitor.policies 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapred.child.java.opts 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.retiredjobs.cache.size 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.https.need-auth 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.ftp.host.port 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.avoid.read.stale.datanode 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.end-notification.retry.attempts 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.ipc.rpc.class 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.cluster.acls.enabled 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.aux-services 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.ubertask.maxmaps 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.container-manager.thread-count 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.app-submission.cross-platform 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.reducer.preempt.delay.sec 2015-04-28 15:10:29,515 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3native.bytes-per-checksum 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.path.based.cache.block.map.allocation.percent 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.markreset.buffer.percent 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.cache.revocation.polling.ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.lazypersist.file.scrub.interval.sec 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.recovery.store.fs.uri 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.retry.interval.ms 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.keytab 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.dump.dir 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.user.name 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.address.rm1 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.maxtasks.perjob 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.address.rm2 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.delete.debug-delay-sec 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.ttl-enable 2015-04-28 15:10:29,515 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.fs.state-store.retry-policy-spec 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.skip.maxgroups 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.trash.interval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.name 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.heartbeats.in.second 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.persist.jobstatus.active 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.done-dir 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.instrumentation.requires.admin 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: 
Handling deprecation for nfs.rtmax 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.container.liveness-monitor.interval-ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.backup.address 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.readahead.bytes 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.cleaner.enable 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.block.write.retries 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.http.address 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.failover-controller.graceful-fence.connection.retries 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.safemode.threshold-pct 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.java.secure.random.algorithm 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.dns.nameserver 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.cluster.temp.dir 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.client.submit.file.replication 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.edits.journal-plugin.qjournal 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.write.exclude.nodes.cache.expiry.interval.millis 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.nameservices 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.mmap.cache.timeout.ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.skip.checksum.errors 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.hostname 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.acl.enable 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.fast.upload 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for file.blocksize 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.rpc.socket.factory.class.default 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.common.configuration.version 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.client.thread-count 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.drop.cache.behind.reads 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.output.value.class 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.output.fileoutputformat.outputdir 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-timeout-ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.max-completed-applications 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.cleaner.period-mins 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.tasktracker.maxblacklists 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.end-notification.max.retry.interval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.acl-view-job 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.job.task.listener.thread-count 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.resource.cpu-vcores 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.edit.log.autoroll.check.interval.ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.search.attr.member 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.client.conf 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.root-dir 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.journalnode.https-address 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.fetch.retry.timeout-ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.bytes-per-checksum 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.max.objects 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.instrumentation 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.max.transfer.threads 2015-04-28 15:10:29,516 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.block.access.key.update.interval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.jobhistory.task.numberprogresssplits 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.memory.mb 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.hdfs-blocks-metadata.enabled 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.healthchecker.interval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.image.transfer.chunksize 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.taskmemorymanager.monitoringinterval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.https.keystore.resource 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.connect.retry-interval.ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.webapp.address 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.minimum-allocation-mb 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.cleaner.resource-sleep-ms 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for net.topology.impl 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.seqfile.compress.blocksize 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.AbstractFileSystem.ftp.impl 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.edits.dir 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.running.reduce.limit 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.heartbeat.recheck-interval 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.safemode.extension 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.reduce.shuffle.consumer.plugin.class 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.vmem-check-enabled 2015-04-28 15:10:29,516 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.persist.jobstatus.dir 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.delegation.key.update-interval 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.rpc.protection 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.permissions.umask-mode 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.staticuser.user 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.connection.maximum 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.paging.maximum 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.kerberos.keytab 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.webinterface.trusted 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.dns.nameserver 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.block.write.replace-datanode-on-failure.enable 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.use.legacy.blockreader.local 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.checkpoint.dir 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.top.windows.minutes 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.maxtaskfailures.per.tracker 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for net.topology.node.switch.mapping.impl 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.max.connections 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.application-client-protocol.poll-interval-ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.localizer.address 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.list.cache.pools.num.responses 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.server.port 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.zookeeper.parent-znode 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.admin.thread-count 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.resource.cpu-vcores 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.fencing.ssh.nc-command 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.http.policy 2015-04-28 
15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.attempts.maximum 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.log-aggregation.retain-check-interval-seconds 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3n.multipart.copy.block.size 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.jar 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-acl 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.keystores.factory.class 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.split.metainfo.maxsize 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3.maxRetries 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.random.device.file.path 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.client.nodemanager-connect.max-wait-ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.http-address.hacluster.nn2 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.webapp.address.rm2 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.http-address.hacluster.nn1 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.webapp.address.rm1 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.client-am.ipc.max-retries 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.am.max-retries 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.replication.max 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.https.address 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.kill.max 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.committer.setup.cleanup.needed 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.format.allow 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.domain.socket.data.traffic 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling 
deprecation for yarn.nodemanager.localizer.cache.target-size-mb 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.admin.client.thread-count 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobtracker.restart.recover 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.store-class 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.tmp.dir 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.ttl-ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.speculative 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.recovery.enabled 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.recovery.dir 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.counters.max 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.max.extra.edits.segments.retained 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.webhdfs.user.provider.user.pattern 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.mmap.enabled 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.log.level 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.file-block-storage-locations.timeout.millis 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.fuse.timer.period 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.local.dir.minspacestart 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.health-monitor.check-interval.ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.docker-container-executor.exec-name 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.fs.state-store.retry-interval-ms 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.output.fileoutputformat.compress 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.native.lib.available 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
yarn.sharedcache.store.in-memory.staleness-period-mins 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.input.fileinputformat.inputdir 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.AbstractFileSystem.har.impl 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.running.map.limit 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.input.buffer.percent 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.webapp.address 2015-04-28 15:10:29,517 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.multipart.size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.slow.io.warning.threshold.ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.job.committer.commit-window 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.submithostname 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.ifile.readahead 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3native.replication 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3.stream-buffer-size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.fsdatasetcache.max.threads.per.volume 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.output.fileoutputformat.compress.codec 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.store.in-memory.initial-delay-mins 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.webapp.address 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.userlog.limit.kb 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.connection.ssl.enabled 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.webapp.address 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.fuse.connection.timeout 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.input.fileinputformat.numinputfiles 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.server.max.connections 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.resource.mb 2015-04-28 15:10:29,518 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.groups.cache.secs 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.container-monitor.interval-ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3.client-write-packet-size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.replication 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.shuffle.transfer.buffer.size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.fencing.ssh.private-key-files 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.group.mapping.ldap.directory.search.timeout 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.available-space-volume-choosing-policy.balanced-space-threshold 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.work.around.non.threadsafe.getpwuid 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.taskcontroller 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.indexcache.mb 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.maximum-allocation-vcores 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.sleep-delay-before-sigkill.ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.cluster-id 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.acl-modify-job 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.automatic.close 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.input.fileinputformat.list-status.num-threads 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.acls.enabled 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.short.circuit.replica.stale.threshold.ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3.block.size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.resource.du.reserved 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.intermediate-done-dir 2015-04-28 15:10:29,518 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.edits.noeditlogchannelflush 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.shuffle.input.buffer.percent 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.maxattempts 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.http.policy 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.audit.loggers 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.groups.cache.warn.after.ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.serializations 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.application.priority-labels 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.http.policy 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.proxy.provider.hacluster 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.file-block-storage-locations.num-threads 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.cluster.local.dir 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.secondary.https-address 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.kerberos.kinit.command 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.block.access.token.lifetime 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.delegation.token.max-lifetime 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.drop.cache.behind.writes 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.local.clientfactory.class.name 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.num.extra.edits.retained 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.connect.max.retries.on.timeouts 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3n.block.size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.map.output.collector.class 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ha.health-monitor.connect-retry-interval.ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
mapreduce.tasktracker.map.tasks.maximum 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.datanode-restart.timeout 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.mapfile.bloom.size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.kms.client.authentication.retry-count 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client-write-packet-size 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.swift.impl 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.shuffle.log.backups 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ftp.blocksize 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.scheduler.monitor.enable 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for nfs.allow.insecure.ports 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.nm.uploader.thread-count 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.authorization 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.am.liveness-monitor.expiry-interval-ms 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.har.impl.disable.cache 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-timeline-store.path 2015-04-28 15:10:29,518 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.reduce.slowstart.completedmaps 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.minicluster.fixed.ports 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.automatic-failover.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.userlog.retain.hours 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.accesstime.precision 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.mapfile.bloom.error.rate 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.store.class 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.leveldb-state-store.path 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.support.allow.format 2015-04-28 15:10:29,519 DEBUG 
[eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.container-executor.class 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.top.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.shuffle.log.separate 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.kms.client.encrypted.key.cache.low-watermark 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.fast.buffer.size 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.user.group.static.mapping.overrides 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.cached.conn.retry 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.path.based.cache.refresh.interval.ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.fs-limits.max-directory-items 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.zk-retry-interval-ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.ha.log-roll.period 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for ipc.client.fallback-to-simple-auth-allowed 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.remote-app-log-dir 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.healthchecker.script.timeout 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.resource.percentage-physical-cpu-limit 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.fs-limits.max-xattr-size 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.http.address 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.blocks.per.postponedblocks.rescan 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.jetty.logs.serve.aliases 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.id 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.admin.acl 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.app.mapreduce.am.hard-kill-timeout-ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.webapp.address 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.recovery.enable 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.store.in-memory.check-period-mins 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.df.interval 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.hostname 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.token.tracking.ids.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.scheduler.increment-allocation-mb 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.mmap.retry.timeout.ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.move.thread-count 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.permissions.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.AbstractFileSystem.hdfs.impl 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.inputformat.class 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.filter.initializers 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.http-authentication.simple.anonymous.allowed 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.client-server.thread-count 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.resource-tracker.address 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.working.dir 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.decommission.blocks.per.interval 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for rpc.metrics.quantile.enable 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.timeout 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.resource.memory-mb 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.disk-health-checker.min-healthy-disks 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for 
dfs.datanode.failed.volumes.tolerated 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.fileoutputcommitter.algorithm.version 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.framework.name 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.skip.proc.count.autoincr 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.system-metrics-publisher.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.nested-level 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3a.connection.timeout 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.timeline-service.generic-application-history.enabled 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for file.client-write-packet-size 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.delayed.delegation-token.removal-interval-ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.max.attempts 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.read.shortcircuit.streams.cache.expiry.ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.client.genericoptionsparser.used 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.health-checker.script.timeout-ms 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.fs.state-store.num-retries 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.ssl.require.client.cert 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.uid.cache.secs 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.jobhistory.keytab 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.ha.automatic-failover.zk-base-path 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.tasktracker.dns.interface 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.speculative.speculative-cap-running-tasks 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.block.id.layout.upgrade.threads 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.context 2015-04-28 15:10:29,519 DEBUG [eventHandlingThread] 
org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.delegation.token.renew-interval 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.dispatcher.exit-on-error 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.blockreport.intervalMsec 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for io.map.index.skip 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.hdfs-servers 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.map.output.compress 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.security.kms.client.encrypted.key.cache.num.refill.threads 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.s3n.multipart.uploads.block.size 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.merge.progress.records 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.aux-services.mapreduce_shuffle.class 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for tfile.fs.output.buffer.size 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for fs.du.interval 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.client.failover.connection.retries 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.top.window.num.buckets 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.job.dir 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.sharedcache.uploader.server.address 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for rpc.engine.org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.zk.quorum 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.enable.retrycache 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.datanode.du.reserved 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.registry.system.acls 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.task.profile.reduce.params 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.reduce.memory.mb 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for hadoop.http.authentication.kerberos.principal 2015-04-28 
15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for mapreduce.client.completion.pollinterval 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.name.dir.restore 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for dfs.namenode.secondary.http-address 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.ha.enable 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for s3.bytes-per-checksum 2015-04-28 15:10:29,520 DEBUG [eventHandlingThread] org.apache.hadoop.conf.Configuration: Handling deprecation for yarn.resourcemanager.webapp.https.address 2015-04-28 15:10:29,572 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=0, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml, packetSize=65016, chunksPerPacket=126, bytesCurBlock=0 2015-04-28 15:10:29,602 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk packet full seqno=0, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml, bytesCurBlock=64512, blockSize=33554432, appendChunk=false 2015-04-28 15:10:29,602 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 0 2015-04-28 15:10:29,602 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:29,602 DEBUG [Thread-56] org.apache.hadoop.hdfs.DFSClient: Allocating new block 2015-04-28 15:10:29,604 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=1, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml, packetSize=65016, chunksPerPacket=126, bytesCurBlock=64512 2015-04-28 15:10:29,612 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #17 2015-04-28 15:10:29,613 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 1 2015-04-28 15:10:29,613 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 2 2015-04-28 15:10:29,613 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 2 2015-04-28 15:10:29,621 DEBUG [IPC Server listener on 48332] org.apache.hadoop.ipc.Server: Server connection from IP117:43359; # active connections: 1; # queued calls: 0 2015-04-28 15:10:29,623 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:29,626 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { effectiveUser: "dsperf" } protocol: "org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB" 2015-04-28 15:10:29,626 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #107 2015-04-28 15:10:29,627 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: 
org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#107 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:29,627 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:29,632 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #17 2015-04-28 15:10:29,632 DEBUG [Thread-56] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: addBlock took 20ms 2015-04-28 15:10:29,636 DEBUG [Thread-56] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:29,636 DEBUG [Thread-56] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK] 2015-04-28 15:10:29,636 DEBUG [Thread-56] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:29,636 DEBUG [Thread-56] org.apache.hadoop.hdfs.DFSClient: Send buf size 131072 2015-04-28 15:10:29,637 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #18 2015-04-28 15:10:29,639 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #18 2015-04-28 15:10:29,639 DEBUG [Thread-56] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getServerDefaults took 2ms 2015-04-28 15:10:29,639 DEBUG [Thread-56] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:29,673 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 25 procesingTime= 22 2015-04-28 15:10:29,676 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#107 Retry#0 2015-04-28 15:10:29,676 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#107 Retry#0 Wrote 266 bytes. 
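The writeChunk / computePacketChunkSize entries above report chunkSize=516, chunksPerPacket=126 and packetSize=65016 for the staged job_..._conf.xml. A minimal sketch of that arithmetic follows, assuming the stock 512-byte checksum chunk with a 4-byte CRC and the default 64 KB dfs.client-write-packet-size (the property appears in the deprecation list earlier in this log); the header allowance constant is an assumption, not a value read from this cluster.

    // Illustrative only: reproduces the packet-sizing arithmetic visible in the
    // DFSClient computePacketChunkSize entries above. Constants are assumed
    // Hadoop defaults, not values taken from this cluster's configuration.
    public class PacketSizingSketch {
        public static void main(String[] args) {
            int bytesPerChecksum = 512;   // dfs.bytes-per-checksum default (assumed)
            int checksumSize = 4;         // one CRC32 per chunk (assumed)
            int writePacketSize = 65536;  // dfs.client-write-packet-size default (assumed)
            int maxHeaderLen = 33;        // approximate per-packet header allowance (assumed)

            int chunkSize = bytesPerChecksum + checksumSize;                          // 516, as logged
            int chunksPerPacket = Math.max((writePacketSize - maxHeaderLen) / chunkSize, 1); // 126, as logged
            int packetSize = chunksPerPacket * chunkSize;                             // 65016, as logged

            System.out.printf("chunkSize=%d chunksPerPacket=%d packetSize=%d%n",
                    chunkSize, chunksPerPacket, packetSize);
            // A full packet therefore carries 126 * 512 = 64512 data bytes, which is
            // exactly the bytesCurBlock value reported when packet seqno=0 fills up.
        }
    }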
2015-04-28 15:10:29,682 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #108 2015-04-28 15:10:29,682 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#108 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:29,682 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:29,683 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:29,683 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#108 Retry#0 2015-04-28 15:10:29,683 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#108 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:29,862 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741845_1021 sending packet packet seqno: 0 offsetInBlock: 0 lastPacketInBlock: false lastByteOffsetInBlock: 64512 2015-04-28 15:10:29,863 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741845_1021 sending packet packet seqno: 1 offsetInBlock: 64512 lastPacketInBlock: false lastByteOffsetInBlock: 119732 2015-04-28 15:10:29,907 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 0 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 13682462 flag: 0 flag: 0 2015-04-28 15:10:29,909 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 1 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 16477850 flag: 0 flag: 0 2015-04-28 15:10:29,909 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741845_1021 sending packet packet seqno: 2 offsetInBlock: 119732 lastPacketInBlock: true lastByteOffsetInBlock: 119732 2015-04-28 15:10:29,913 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741845_1021] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 2 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1998130 flag: 0 flag: 0 2015-04-28 15:10:29,916 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #19 2015-04-28 15:10:29,932 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #19 
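The DataStreamer / ResponseProcessor entries above show the usual HDFS write handshake: packets 0-2 are queued and sent with increasing seqno and offsetInBlock, each ack carries one SUCCESS reply per datanode in the pipeline, and the writer blocks at "Waiting for ack for: 2" until the last sequence number is acknowledged. The sketch below is a simplified model of that ack-queue handshake, not the actual DFSClient code; all class and method names are illustrative.

    import java.util.concurrent.LinkedBlockingDeque;

    // Simplified model of the send/ack pattern seen in the log; names are illustrative.
    class AckQueueSketch {
        private final LinkedBlockingDeque<Long> ackQueue = new LinkedBlockingDeque<>();
        private long lastAcked = -1;

        // Streamer side: record the packet as in flight, then ship its bytes.
        void sendPacket(long seqno) throws InterruptedException {
            ackQueue.put(seqno);                       // "Queued packet <seqno>"
            // ... write the packet header and data to the first datanode here ...
        }

        // ResponseProcessor side: every node in the pipeline must reply SUCCESS.
        synchronized void onAck(long seqno, boolean[] replies) {
            for (boolean ok : replies) {
                if (!ok) throw new IllegalStateException("pipeline failure at seqno " + seqno);
            }
            Long head = ackQueue.poll();               // acks arrive in seqno order
            if (head == null || head != seqno) throw new IllegalStateException("unexpected ack " + seqno);
            lastAcked = seqno;
            notifyAll();
        }

        // Writer side: flush/close blocks here ("Waiting for ack for: <seqno>").
        synchronized void waitForAck(long seqno) throws InterruptedException {
            while (lastAcked < seqno) {
                wait();
            }
        }
    }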
2015-04-28 15:10:29,932 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 16ms 2015-04-28 15:10:29,938 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,959 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler AM_STARTED 2015-04-28 15:10:29,959 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,960 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler JOB_SUBMITTED 2015-04-28 15:10:29,960 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler JOB_QUEUE_CHANGED 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler JOB_INITED 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler JOB_INFO_CHANGED 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,961 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,963 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_STARTED 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler NORMALIZED_RESOURCE 2015-04-28 15:10:29,964 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler NORMALIZED_RESOURCE 2015-04-28 15:10:30,333 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:16 ScheduledReds:0 AssignedMaps:0 AssignedReds:0 CompletedMaps:0 CompletedReds:0 ContAlloc:0 ContRel:0 HostLocal:0 RackLocal:0 2015-04-28 15:10:30,354 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #20 2015-04-28 15:10:30,386 DEBUG 
[IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #20 2015-04-28 15:10:30,386 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 32ms 2015-04-28 15:10:30,403 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=0 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:30,403 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:30,404 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:30,404 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:30,687 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #109 2015-04-28 15:10:30,687 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#109 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:30,687 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:30,688 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:30,688 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#109 Retry#0 2015-04-28 15:10:30,688 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#109 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:30,699 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #110 2015-04-28 15:10:30,699 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#110 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:30,699 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:30,707 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 2 procesingTime= 6 2015-04-28 15:10:30,708 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#110 Retry#0 2015-04-28 15:10:30,708 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#110 Retry#0 Wrote 32 bytes. 
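The "Reduce slow start threshold not met. completedMapsForReduceSlowstart 16" line above is the allocator holding back the single pending reduce until enough maps complete. The sketch below paraphrases that gate; the slow-start fraction of 1.0 is an assumption inferred from a threshold of 16 with 16 scheduled maps (mapreduce.job.reduce.slowstart.completedmaps appears in the deprecation list earlier in this log), not a value confirmed by the log itself.

    // Rough sketch of the reduce slow-start gate; the formula is a paraphrase,
    // and the fraction below is an assumption, not read from this job's config.
    class ReduceSlowStartSketch {
        static int completedMapsForReduceSlowstart(double slowStartFraction, int totalMaps) {
            return (int) Math.ceil(slowStartFraction * totalMaps);
        }

        public static void main(String[] args) {
            int totalMaps = 16;              // ScheduledMaps:16 in the log
            int completedMaps = 0;           // CompletedMaps:0 at this point in the log
            double slowStartFraction = 1.0;  // assumption; threshold 16 of 16 maps implies 1.0
            int threshold = completedMapsForReduceSlowstart(slowStartFraction, totalMaps);
            if (completedMaps < threshold) {
                System.out.println("Reduce slow start threshold not met. "
                        + "completedMapsForReduceSlowstart " + threshold);
            } else {
                System.out.println("Ramping up reduces");
            }
        }
    }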
2015-04-28 15:10:30,716 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #111 2015-04-28 15:10:30,717 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#111 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:30,717 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:30,717 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:30,718 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#111 Retry#0 2015-04-28 15:10:30,718 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#111 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:30,719 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #112 2015-04-28 15:10:30,720 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#112 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:30,720 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:30,720 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:30,721 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#112 Retry#0 2015-04-28 15:10:30,721 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#112 Retry#0 Wrote 266 bytes. 
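The IPC Server handler entries above time each MRClientProtocol call in the form "Served: getJobReport queueTime= 0 procesingTime= 1" (the spelling "procesingTime" is what the log actually emits, so any pattern has to reproduce it verbatim). A rough way to aggregate those timings per RPC method, again assuming a local copy of the log at a hypothetical path:

    import re
    from collections import defaultdict

    LOG_PATH = "appmaster_syslog.txt"   # assumed local copy of this syslog

    # "Served: getJobReport queueTime= 0 procesingTime= 1" -- note the log's own spelling
    SERVED = re.compile(r"Served:\s+(\S+)\s+queueTime=\s*(\d+)\s+procesingTime=\s*(\d+)")

    def rpc_summary(path=LOG_PATH):
        """Aggregate queue/processing milliseconds per served RPC method."""
        stats = defaultdict(lambda: [0, 0, 0])   # method -> [calls, queue_ms, processing_ms]
        text = open(path, encoding="utf-8", errors="replace").read()
        for method, q, p in SERVED.findall(text):
            s = stats[method]
            s[0] += 1
            s[1] += int(q)
            s[2] += int(p)
        return stats

    if __name__ == "__main__":
        for method, (calls, q, p) in rpc_summary().items():
            print(f"{method}: {calls} calls, {q} ms queued, {p} ms processing")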
2015-04-28 15:10:31,405 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #21 2015-04-28 15:10:31,420 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #21 2015-04-28 15:10:31,420 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 15ms 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000002, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000003, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:31,432 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 2 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000002 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000003 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:31,432 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:31,433 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=16 #asks=0 2015-04-28 15:10:31,433 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=15 #asks=1 2015-04-28 15:10:31,433 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=16 #asks=1 2015-04-28 15:10:31,433 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=15 #asks=2 2015-04-28 15:10:31,433 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=16 #asks=2 2015-04-28 15:10:31,433 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=15 #asks=3 2015-04-28 15:10:31,433 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 
priority=20 resourceName=* numContainers=16 #asks=3 2015-04-28 15:10:31,433 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=15 #asks=4 2015-04-28 15:10:31,433 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:31,433 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_ASSIGNED 2015-04-28 15:10:31,433 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000002 to attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:31,434 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000002, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000000_0 on node host-IP143:64318 2015-04-28 15:10:31,434 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:31,434 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:31,434 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:31,435 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=15 #asks=4 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=14 #asks=4 2015-04-28 15:10:31,435 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=15 #asks=4 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=14 #asks=4 2015-04-28 15:10:31,435 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=15 #asks=4 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=14 #asks=4 2015-04-28 15:10:31,435 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=15 #asks=4 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=14 #asks=4 
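The BEFORE/AFTER decResourceRequest pairs above show the requestor decrementing its outstanding ask for the matched host, the rack (/default-rack), and * each time a container is assigned. A quick consistency check that every AFTER line is exactly one container lower than the preceding BEFORE for the same resourceName might look like the sketch below (sketch only; the log path is an assumption):

    import re

    LOG_PATH = "appmaster_syslog.txt"   # assumed local copy of this syslog

    # "BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143
    #  numContainers=16 #asks=0" followed by a matching AFTER line one container lower.
    DEC = re.compile(
        r"(BEFORE|AFTER) decResourceRequest:\s+applicationId=\d+\s+priority=\d+\s+"
        r"resourceName=(\S+)\s+numContainers=(\d+)")

    def check_dec_requests(path=LOG_PATH):
        """Pair each BEFORE with the next AFTER for the same resourceName and
        flag any pair where numContainers does not drop by exactly one."""
        pending = {}     # resourceName -> numContainers from the last BEFORE
        problems = []
        text = open(path, encoding="utf-8", errors="replace").read()
        for phase, name, count in DEC.findall(text):
            count = int(count)
            if phase == "BEFORE":
                pending[name] = count
            elif name in pending and count != pending.pop(name) - 1:
                problems.append((name, count))
        return problems

    if __name__ == "__main__":
        print(check_dec_requests() or "all decrements consistent")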
2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000003 to attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000003, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000001_0 on node host-IP117:64318 2015-04-28 15:10:31,435 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:31,435 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:14 ScheduledReds:0 AssignedMaps:2 AssignedReds:0 CompletedMaps:0 CompletedReds:0 ContAlloc:2 ContRel:0 HostLocal:2 RackLocal:0 2015-04-28 15:10:31,447 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:31,481 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:31,482 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #22 2015-04-28 15:10:31,484 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #22 2015-04-28 15:10:31,484 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:31,485 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #23 2015-04-28 15:10:31,486 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #23 2015-04-28 15:10:31,486 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:31,496 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: The job-jar file on the remote FS is hdfs://hacluster/staging-dir/dsperf/.staging/job_1430213948957_0001/job.jar 2015-04-28 15:10:31,497 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #24 2015-04-28 15:10:31,498 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #24 2015-04-28 15:10:31,498 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:31,498 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to 
/IP127:65110 from dsperf sending #25 2015-04-28 15:10:31,500 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #25 2015-04-28 15:10:31,500 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:31,500 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: The job-conf file on the remote FS is /staging-dir/dsperf/.staging/job_1430213948957_0001/job.xml 2015-04-28 15:10:31,501 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Adding #0 tokens and #1 secret keys for NM use for launching container 2015-04-28 15:10:31,501 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Size of containertokens_dob is 1 2015-04-28 15:10:31,502 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Putting shuffle token in serviceData 2015-04-28 15:10:31,521 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000000_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:31,521 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:31,521 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:31,523 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:31,523 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_ASSIGNED 2015-04-28 15:10:31,523 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:31,523 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:31,523 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000001_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:31,523 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:31,524 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:31,524 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000002 taskAttempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:31,524 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: 
TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:31,524 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000003 taskAttempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:31,524 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:31,525 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000002 taskAttempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:31,525 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000003 taskAttempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:31,527 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:31,527 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:31,528 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:31,536 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:31,538 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:31,538 DEBUG [ContainerLauncher #0] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:31,542 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:31,551 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:31,552 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:31,552 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:31,552 DEBUG [ContainerLauncher #1] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 
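The allocator entries above bind each container to a map task attempt ("Assigned container ... to attempt_..."), after which the ContainerLauncher threads open NodeManager proxies and start the containers. Extracting that container-to-attempt mapping from a saved copy of the log could be done roughly as follows (file name assumed, as before):

    import re

    LOG_PATH = "appmaster_syslog.txt"   # assumed local copy of this syslog

    # "Assigned container container_..._01_000002 to attempt_..._m_000000_0"
    ASSIGN = re.compile(r"Assigned container (container_\S+) to (attempt_\S+)")

    def assignments(path=LOG_PATH):
        """Return a mapping of task attempt -> container, in log order
        (plain dicts preserve insertion order on Python 3.7+)."""
        text = open(path, encoding="utf-8", errors="replace").read()
        out = {}
        for container, attempt in ASSIGN.findall(text):
            out[attempt] = container
        return out

    if __name__ == "__main__":
        for attempt, container in assignments().items():
            print(attempt, "->", container)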
2015-04-28 15:10:31,552 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:31,569 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:31,569 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:31,569 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:31,569 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:31,570 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:31,570 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:31,570 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:31,570 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:31,571 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"VObOub/9jMcfKS2wcqTRapb4FiOwUDQ+6gyYblhv\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:31,572 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@38adb99b 2015-04-28 15:10:31,573 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:31,573 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:31,574 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:31,574 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:31,574 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:31,574 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:31,575 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"VObOub/9jMcfKS2wcqTRapb4FiOwUDQ+6gyYblhv\",nc=00000001,cnonce=\"antQZylVk6gIVx4JDok704slOy+95C6+udVAMVut\",digest-uri=\"/default\",maxbuf=65536,response=def756ac08af968ee24067489de1f885,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:31,579 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=911b8a6c8ef87963b72f301e6235a754" 2015-04-28 15:10:31,579 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:31,580 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 4 2015-04-28 15:10:31,583 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #27 2015-04-28 15:10:31,591 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #27 2015-04-28 15:10:31,592 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:31,592 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 3 2015-04-28 15:10:31,592 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 23ms 2015-04-28 15:10:31,599 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for 
attempt_1430213948957_0001_m_000000_0 : 13562 2015-04-28 15:10:31,599 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:31,599 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:31,600 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000000_0] using containerId: [container_1430213948957_0001_01_000002 on NM: [host-IP143:64318] 2015-04-28 15:10:31,604 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000000_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:31,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:31,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:31,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:31,605 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:31,605 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:31,605 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000000 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:31,605 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000000 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:31,605 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:31,605 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:31,637 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"MKIsNwQMl1xgRcVvaxdT4sMmZRuml21lF/4Mq3zl\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:31,637 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@4d4e2286 2015-04-28 15:10:31,638 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:31,638 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:31,639 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:31,639 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:31,639 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:31,639 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:31,640 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"MKIsNwQMl1xgRcVvaxdT4sMmZRuml21lF/4Mq3zl\",nc=00000001,cnonce=\"8R+s4BED/uOhCqmTWe+YmhiN6TTZ6nJiy3XdrRbI\",digest-uri=\"/default\",maxbuf=65536,response=71596eb93fcda639d4be56f01c0f646b,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:31,678 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=8b662caae6866d58114f01fb7dc8ef54" 2015-04-28 15:10:31,679 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:31,681 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #26 2015-04-28 15:10:31,681 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:31,723 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #113 2015-04-28 15:10:31,723 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#113 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:31,723 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:31,723 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:31,724 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#113 Retry#0 2015-04-28 15:10:31,724 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to 
org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#113 Retry#0 Wrote 32 bytes. 2015-04-28 15:10:31,725 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #114 2015-04-28 15:10:31,726 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#114 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:31,726 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:31,726 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:31,726 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#114 Retry#0 2015-04-28 15:10:31,727 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#114 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:31,728 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #115 2015-04-28 15:10:31,728 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#115 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:31,729 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:31,729 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:31,729 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#115 Retry#0 2015-04-28 15:10:31,729 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#115 Retry#0 Wrote 266 bytes. 
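The ProtobufRpcEngine lines scattered through this log also time each client-side call made by the AM ("Call: allocate took 32ms", "Call: getFileInfo took 2ms", "Call: startContainers took 263ms"). A small sketch for collecting those latencies per RPC name, under the same assumption of a local log copy:

    import re
    from collections import defaultdict

    LOG_PATH = "appmaster_syslog.txt"   # assumed local copy of this syslog

    # Client-side timings such as "Call: startContainers took 263ms"
    CALL = re.compile(r"Call: (\S+) took (\d+)ms")

    def call_latencies(path=LOG_PATH):
        """Collect per-RPC latency samples (ms) from the AM's client-side timing lines."""
        samples = defaultdict(list)
        text = open(path, encoding="utf-8", errors="replace").read()
        for name, ms in CALL.findall(text):
            samples[name].append(int(ms))
        return samples

    if __name__ == "__main__":
        for name, ms in call_latencies().items():
            print(f"{name}: n={len(ms)} min={min(ms)} max={max(ms)} "
                  f"avg={sum(ms)/len(ms):.1f} ms")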
2015-04-28 15:10:31,832 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #26 2015-04-28 15:10:31,832 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:31,832 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:31,832 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 263ms 2015-04-28 15:10:31,833 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000001_0 : 13562 2015-04-28 15:10:31,833 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:31,833 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:31,833 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000001_0] using containerId: [container_1430213948957_0001_01_000003 on NM: [host-IP117:64318] 2015-04-28 15:10:31,833 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000001_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:31,834 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:31,834 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:31,835 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:31,835 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:31,835 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:31,836 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000001 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:31,836 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000001 Task Transitioned from SCHEDULED to RUNNING 
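The TaskImpl and TaskAttemptImpl entries above record state-machine transitions as they happen (TaskAttempt: UNASSIGNED to ASSIGNED, ASSIGNED to RUNNING; Task: SCHEDULED to RUNNING). The sketch below simply tallies the transitions that appear in a saved copy of the log; it reflects only what is logged here, not the full MapReduce state machines, and the log path is again an assumption.

    import re
    from collections import Counter

    LOG_PATH = "appmaster_syslog.txt"   # assumed local copy of this syslog

    # "attempt_..._m_000000_0 TaskAttempt Transitioned from ASSIGNED to RUNNING"
    # "task_..._m_000000 Task Transitioned from SCHEDULED to RUNNING"
    TRANSITION = re.compile(r"(\S+) (Task(?:Attempt)?) Transitioned from (\w+) to (\w+)")

    def transition_counts(path=LOG_PATH):
        """Count observed (entity kind, from-state, to-state) transitions."""
        text = open(path, encoding="utf-8", errors="replace").read()
        return Counter((kind, src, dst)
                       for _, kind, src, dst in TRANSITION.findall(text))

    if __name__ == "__main__":
        for (kind, src, dst), n in sorted(transition_counts().items()):
            print(f"{kind}: {src} -> {dst} x{n}")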
2015-04-28 15:10:31,836 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:31,836 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:32,436 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #28 2015-04-28 15:10:32,446 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #28 2015-04-28 15:10:32,446 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 10ms 2015-04-28 15:10:32,447 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=2 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000004, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000005, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:32,447 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 2 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000004 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000005 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=14 #asks=0 2015-04-28 15:10:32,447 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=13 #asks=1 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=14 #asks=1 2015-04-28 15:10:32,447 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER 
decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=13 #asks=2 2015-04-28 15:10:32,447 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=14 #asks=2 2015-04-28 15:10:32,448 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=13 #asks=3 2015-04-28 15:10:32,448 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=14 #asks=3 2015-04-28 15:10:32,448 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=13 #asks=4 2015-04-28 15:10:32,448 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:32,448 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000004 to attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:32,448 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_ASSIGNED 2015-04-28 15:10:32,448 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000004, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000002_0 on node host-IP143:64318 2015-04-28 15:10:32,448 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:32,448 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:32,448 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:32,449 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000002_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:32,449 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:32,449 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:32,449 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000004 taskAttempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:32,449 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:32,448 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=13 #asks=4 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=12 #asks=4 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=13 #asks=4 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=12 #asks=4 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=13 #asks=4 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=12 #asks=4 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=13 #asks=4 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=12 #asks=4 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000005 to attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000005, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000003_0 on node host-IP117:64318 2015-04-28 15:10:32,451 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:32,451 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:12 ScheduledReds:0 AssignedMaps:4 AssignedReds:0 CompletedMaps:0 CompletedReds:0 ContAlloc:4 ContRel:0 HostLocal:4 RackLocal:0 2015-04-28 15:10:32,455 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:32,455 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_ASSIGNED 2015-04-28 15:10:32,455 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:32,456 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:32,456 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000003_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:32,456 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:32,456 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:32,456 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000005 taskAttempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:32,456 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:32,457 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000004 taskAttempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:32,457 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:32,457 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:32,464 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000005 taskAttempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:32,464 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:32,473 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 
1569710345) 2015-04-28 15:10:32,473 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:32,473 DEBUG [ContainerLauncher #2] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:32,474 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:32,476 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:32,477 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:32,477 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:32,479 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:32,485 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:32,486 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"wC65A2CBDFLFcfRB6F6XvwhdBGstv5UWRreken5H\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:32,487 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@22d3ef0f 2015-04-28 15:10:32,487 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:32,487 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:32,488 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:32,488 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:32,488 DEBUG [ContainerLauncher #3] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:32,488 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:32,489 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:32,489 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:32,497 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:32,497 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:32,497 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:32,497 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:32,498 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"wC65A2CBDFLFcfRB6F6XvwhdBGstv5UWRreken5H\",nc=00000001,cnonce=\"Vyn5VxvfvdhqkWZioBrVZuV3Sjl8FL2P6jtjw2o2\",digest-uri=\"/default\",maxbuf=65536,response=1764eaaf42ff81bbf50959ae3337ea30,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:32,498 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:32,499 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:32,502 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=4a9dd9ee5cde78ee8b59480c124ab5e5" 2015-04-28 15:10:32,502 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" 
serverId: "default" challenge: "realm=\"default\",nonce=\"DzuFqLAAa6SbpwJAd0OtOBYCggtNGxx5CB+Gz7dp\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:32,502 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@5d4dd246 2015-04-28 15:10:32,503 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:32,503 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:32,503 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:32,508 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:32,508 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:32,508 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:32,508 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:32,509 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"DzuFqLAAa6SbpwJAd0OtOBYCggtNGxx5CB+Gz7dp\",nc=00000001,cnonce=\"YVvrD7yL9bKU4jCaysO7tz3K2/vAyjuLjialtsj8\",digest-uri=\"/default\",maxbuf=65536,response=b7e10c7e275e8d7c9cb6f9824c1e8c24,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:32,511 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #29 2015-04-28 15:10:32,513 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=3c101c18d148c1f8000c3249bac8c4a6" 2015-04-28 15:10:32,513 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:32,532 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 4 2015-04-28 15:10:32,534 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #29 2015-04-28 15:10:32,534 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 58ms 2015-04-28 15:10:32,535 INFO [ContainerLauncher #2] 
org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000002_0 : 13562 2015-04-28 15:10:32,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:32,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:32,535 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000002_0] using containerId: [container_1430213948957_0001_01_000004 on NM: [host-IP143:64318] 2015-04-28 15:10:32,535 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000002_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:32,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:32,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:32,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:32,536 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:32,536 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:32,536 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000002 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:32,536 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000002 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:32,536 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:32,536 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:32,537 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:32,538 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 3 2015-04-28 15:10:32,538 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client 
(1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:32,544 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #30 2015-04-28 15:10:32,553 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #30 2015-04-28 15:10:32,553 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:32,554 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:32,554 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 65ms 2015-04-28 15:10:32,554 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000003_0 : 13562 2015-04-28 15:10:32,554 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:32,554 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:32,554 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000003_0] using containerId: [container_1430213948957_0001_01_000005 on NM: [host-IP117:64318] 2015-04-28 15:10:32,555 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000003_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:32,555 DEBUG [AsyncDispatcher event 
handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000003 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:32,555 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000003 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:32,555 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:32,555 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:32,732 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #116 2015-04-28 15:10:32,733 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#116 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:32,733 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:32,733 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:32,733 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#116 Retry#0 2015-04-28 15:10:32,733 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#116 Retry#0 Wrote 32 bytes. 2015-04-28 15:10:32,736 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #117 2015-04-28 15:10:32,736 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#117 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:32,736 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:32,737 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:32,737 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#117 Retry#0 2015-04-28 15:10:32,737 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#117 Retry#0 Wrote 266 bytes. 
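The records above, and everything that follows, share one log4j-style layout: a millisecond timestamp, a level, the emitting thread in brackets, the logger class, and the message, with many records run together on a single wrapped line. Below is a minimal Python sketch for splitting such a blob back into individual records; the layout and the regular expression are inferred purely from this excerpt, and every name in it (parse_records, _parse_ts) is mine rather than anything shipped with Hadoop.

```python
import re
from datetime import datetime

# Record layout inferred from this excerpt:
#   "2015-04-28 15:10:32,535 LEVEL [thread] logger.Class: message ..."
# Records are concatenated, so each message ends where the next timestamp begins.
TS = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}"
RECORD = re.compile(
    rf"(?P<ts>{TS}) (?P<level>[A-Z]+) \[(?P<thread>[^\]]+)\] (?P<logger>\S+): "
    rf"(?P<msg>.*?)(?=(?:{TS} [A-Z]+ \[)|\Z)",
    re.S,
)

def _parse_ts(raw):
    # "2015-04-28 15:10:32,535" -> datetime, treating the trailing digits as milliseconds
    base, millis = raw.split(",")
    return datetime.strptime(base, "%Y-%m-%d %H:%M:%S").replace(microsecond=int(millis) * 1000)

def parse_records(text):
    """Yield (timestamp, level, thread, logger, message) for each record in the blob."""
    for m in RECORD.finditer(text):
        yield (_parse_ts(m.group("ts")), m.group("level"), m.group("thread"),
               m.group("logger"), " ".join(m.group("msg").split()))

if __name__ == "__main__":
    import sys
    for ts, level, thread, logger, msg in parse_records(sys.stdin.read()):
        print(ts.time(), level, f"[{thread}]", msg[:100])
```

The later sketches in this log all assume the records have already been split out this way.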
2015-04-28 15:10:32,739 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #118 2015-04-28 15:10:32,740 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#118 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:32,740 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:32,740 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:32,741 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#118 Retry#0 2015-04-28 15:10:32,741 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#118 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:33,452 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #31 2015-04-28 15:10:33,462 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #31 2015-04-28 15:10:33,462 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 10ms 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000007, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000007 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=12 #asks=0 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=11 #asks=1 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 
priority=20 resourceName=host-IP117 numContainers=12 #asks=1 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=11 #asks=2 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=12 #asks=2 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=11 #asks=3 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=12 #asks=3 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=11 #asks=4 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000007 to attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:33,463 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000007, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000004_0 on node host-IP117:64318 2015-04-28 15:10:33,463 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:33,463 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_ASSIGNED 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:33,463 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:33,463 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:11 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:0 CompletedReds:0 ContAlloc:5 ContRel:0 HostLocal:5 RackLocal:0 2015-04-28 15:10:33,464 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:33,464 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000004_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:33,464 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:33,464 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:33,464 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000007 taskAttempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:33,464 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:33,465 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000007 taskAttempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:33,465 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:33,465 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:33,466 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:33,466 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:33,466 DEBUG [ContainerLauncher #4] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:33,466 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:33,468 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
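The RMCommunicator Allocator records just above show the AM's bookkeeping when a container arrives: container_..._000007 is matched to a host-local request on host-IP117, and the BEFORE/AFTER decResourceRequest pairs show the outstanding asks for host-IP143, host-IP117, /default-rack and * each dropping from 12 to 11 containers while #asks grows to 4, ending in the "After Scheduling" snapshot. A small sketch for pulling that bookkeeping into structured form; the field names are copied from these records, the helper functions are my own.

```python
import re

DEC = re.compile(
    r"(BEFORE|AFTER) decResourceRequest: applicationId=(\d+) priority=(\d+) "
    r"resourceName=(\S+) numContainers=(\d+) #asks=(\d+)"
)
SNAPSHOT = re.compile(r"After Scheduling: (.+)")

def dec_requests(messages):
    """Yield (phase, resourceName, numContainers, asks) per bookkeeping record."""
    for msg in messages:
        m = DEC.search(msg)
        if m:
            phase, _app, _prio, name, containers, asks = m.groups()
            yield phase, name, int(containers), int(asks)

def scheduling_snapshot(msg):
    """Turn 'PendingReds:1 ScheduledMaps:11 ...' into a dict of counters."""
    m = SNAPSHOT.search(msg)
    if not m:
        return None
    return {key: int(val)
            for key, val in (tok.split(":", 1) for tok in m.group(1).split())}
```

Applied to the snapshot above this yields PendingReds 1, ScheduledMaps 11, AssignedMaps 5, CompletedMaps 0, ContAlloc 5 and HostLocal 5, which matches five host-local containers allocated so far and no completed maps.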
2015-04-28 15:10:33,468 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:33,470 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:33,470 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:33,472 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"K1nNninNUMEJ5fJmAGGZMXHRt3e4wP6PK7WG6CO1\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:33,472 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@2aabcea3 2015-04-28 15:10:33,473 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:33,473 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:33,473 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:33,473 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:33,473 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:33,473 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:33,475 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"K1nNninNUMEJ5fJmAGGZMXHRt3e4wP6PK7WG6CO1\",nc=00000001,cnonce=\"i/AUU1Go1m6oSXNS7JDIVgtCj+zeNjXtGBcwHgIG\",digest-uri=\"/default\",maxbuf=65536,response=02640cd246a528ccac839389312fb42d,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:33,478 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=ab937709a6cd47e8aadea92472b9912c" 2015-04-28 15:10:33,478 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:33,480 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #32 2015-04-28 15:10:33,481 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to 
host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:33,487 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #32 2015-04-28 15:10:33,487 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:33,487 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:33,487 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 19ms 2015-04-28 15:10:33,487 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000004_0 : 13562 2015-04-28 15:10:33,487 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:33,487 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:33,488 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000004_0] using containerId: [container_1430213948957_0001_01_000007 on NM: [host-IP117:64318] 2015-04-28 15:10:33,488 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000004_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:33,488 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000004 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:33,488 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000004 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:33,488 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:33,489 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:33,565 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63550; # active connections: 1; # queued calls: 0 2015-04-28 15:10:33,660 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:33,662 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:33,666 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"wkj3kbjn23S4gUH7+06dqKX/Ni/cAkVkYqBzE8tP\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:33,666 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63550 Call#-33 Retry#-1 2015-04-28 15:10:33,666 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63550 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:33,745 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #119 2015-04-28 15:10:33,745 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#119 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:33,745 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:33,745 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:33,746 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#119 Retry#0 2015-04-28 15:10:33,746 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#119 Retry#0 Wrote 32 bytes. 
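Interleaved with the IPC and SASL chatter, the AsyncDispatcher records above trace each task and attempt through its state machine (UNASSIGNED to ASSIGNED to RUNNING for attempts, SCHEDULED to RUNNING for tasks). A sketch for reconstructing that timeline from the "Transitioned from X to Y" records; it expects (timestamp, message) pairs such as those produced by the parser sketched earlier, and the function name is mine.

```python
import re

TRANSITION = re.compile(
    r"(?P<id>\S+) (?P<kind>TaskAttempt|Task) Transitioned from (?P<src>\w+) to (?P<dst>\w+)"
)

def transitions(records):
    """records: iterable of (timestamp, message). Returns {task or attempt id: [(ts, src, dst), ...]}."""
    timeline = {}
    for ts, msg in records:
        m = TRANSITION.search(msg)
        if m:
            timeline.setdefault(m.group("id"), []).append((ts, m.group("src"), m.group("dst")))
    return timeline
```

For the window above this shows attempt_..._m_000004_0 going UNASSIGNED to ASSIGNED to RUNNING within roughly 25 ms of its container being allocated, and attempts m_000002_0 and m_000003_0 reaching RUNNING as their startContainers calls return.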
2015-04-28 15:10:33,751 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #120 2015-04-28 15:10:33,751 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#120 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:33,751 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:33,752 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:33,752 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#120 Retry#0 2015-04-28 15:10:33,752 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#120 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:33,754 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #121 2015-04-28 15:10:33,754 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#121 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:33,754 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:33,755 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:33,755 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#121 Retry#0 2015-04-28 15:10:33,755 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#121 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:33,778 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:33,778 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:33,780 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:33,782 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:33,782 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:33,782 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:33,783 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:33,783 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:33,783 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=e0ec916e9ac991f06f40c6f019157b13" 2015-04-28 15:10:33,783 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63550 Call#-33 Retry#-1 2015-04-28 15:10:33,783 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63550 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:33,799 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:33,800 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:33,800 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:33,805 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@6dd86170), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:33,806 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:33,806 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: commitPending 2015-04-28 15:10:33,812 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: fsError 2015-04-28 15:10:33,812 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: shuffleError 2015-04-28 15:10:33,812 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: getMapCompletionEvents 2015-04-28 15:10:33,812 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: ping 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: reportDiagnosticInfo 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: statusUpdate 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: reportNextRecordRange 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: getTask 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: fatalError 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: done 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.metrics2.lib.MutableRates: canCommit 2015-04-28 15:10:33,813 INFO [IPC Server handler 1 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000002 asked for a task 2015-04-28 15:10:33,813 INFO [IPC Server handler 1 on 21207] 
org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000002 given task: attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:33,813 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 7 2015-04-28 15:10:33,817 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@6dd86170), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#0 Retry#0 2015-04-28 15:10:33,817 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@6dd86170), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#0 Retry#0 Wrote 364 bytes. 2015-04-28 15:10:34,465 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #33 2015-04-28 15:10:34,470 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #33 2015-04-28 15:10:34,471 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:34,471 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=0 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:34,471 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:34,471 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:34,758 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #122 2015-04-28 15:10:34,768 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#122 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:34,768 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:34,769 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 10 procesingTime= 1 2015-04-28 15:10:34,769 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#122 Retry#0 2015-04-28 15:10:34,769 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#122 Retry#0 Wrote 32 bytes. 
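The TaskAttemptListenerImpl records above show the umbilical handshake: a freshly launched container JVM calls getTask over TaskUmbilicalProtocol, identifies itself by JVM id, and is handed a task attempt, and the JVM index does not have to match the map index (jvm_..._m_000002 receives attempt_..._m_000000_0). A sketch for pairing those records up; the regexes mirror the two message variants above ("ID :" versus "ID:"), and the helper name is mine.

```python
import re

ASKED = re.compile(r"JVM with ID\s*: (\S+) asked for a task")
GIVEN = re.compile(r"JVM with ID\s*: (\S+) given task: (\S+)")

def jvm_assignments(messages):
    """Map each JVM id seen asking for work to the task attempt it was given (or None)."""
    assignments = {}
    for msg in messages:
        m = ASKED.search(msg)
        if m:
            assignments.setdefault(m.group(1), None)
        m = GIVEN.search(msg)
        if m:
            assignments[m.group(1)] = m.group(2)
    return assignments
```

Further down in this excerpt, jvm_1430213948957_0001_m_000004 is handed attempt_1430213948957_0001_m_000002_0 by the same exchange.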
2015-04-28 15:10:34,769 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63551; # active connections: 2; # queued calls: 0 2015-04-28 15:10:34,771 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #123 2015-04-28 15:10:34,771 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#123 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:34,771 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:34,772 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:34,772 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#123 Retry#0 2015-04-28 15:10:34,772 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#123 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:34,775 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #124 2015-04-28 15:10:34,776 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#124 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:34,777 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:34,777 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 2 procesingTime= 0 2015-04-28 15:10:34,777 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#124 Retry#0 2015-04-28 15:10:34,777 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#124 Retry#0 Wrote 266 bytes. 
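Two kinds of timing show up throughout this excerpt: client-side ProtobufRpcEngine records such as "Call: startContainers took 58ms", and server-side handler accounting such as "Served: getJobReport queueTime= 1 procesingTime= 0" ("procesingTime" is spelled exactly as the server logs it). A sketch for summarising both; the function name and return layout are my own choices, not Hadoop APIs.

```python
import re
from collections import defaultdict

CLIENT_CALL = re.compile(r"Call: (\w+) took (\d+)ms")                        # ProtobufRpcEngine
SERVED = re.compile(r"Served: (\w+) queueTime= (\d+) procesingTime= (\d+)")  # ipc.Server

def timing_summary(messages):
    """Return ({method: [client_ms, ...]}, {method: [(queue, processing), ...]})."""
    client, server = defaultdict(list), defaultdict(list)
    for msg in messages:
        m = CLIENT_CALL.search(msg)
        if m:
            client[m.group(1)].append(int(m.group(2)))
        m = SERVED.search(msg)
        if m:
            server[m.group(1)].append((int(m.group(2)), int(m.group(3))))
    return dict(client), dict(server)
```

On the records above this point that gives startContainers latencies of 58, 65 and 19 ms, allocate latencies of 10 and 7 ms, and only a millisecond or two of queue plus handler time for the getJobReport and getTaskAttemptCompletionEvents calls, with a single 10 ms queueTime outlier on Call#122.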
2015-04-28 15:10:34,868 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:34,868 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:34,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"FNdbU9py4ku+Ns3+Qkc/yJIbvppAZSJz0DdDD5HE\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:34,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63551 Call#-33 Retry#-1 2015-04-28 15:10:34,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63551 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:34,983 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:34,983 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:34,983 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:34,983 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:34,984 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=f36e58c2734387ea7bade21c84ef901d" 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63551 Call#-33 Retry#-1 2015-04-28 15:10:34,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63551 Call#-33 Retry#-1 Wrote 64 bytes. 
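The Socket Reader records above are the server half of the same DIGEST-MD5 exchange the ContainerLauncher threads performed earlier as clients: NEGOTIATE goes out, the token response comes back, and the reader answers SUCCESS and logs "Auth successful". Both sides log every step, so the handshake can be checked mechanically; a sketch, again with my own function names, keyed by the emitting thread.

```python
import re

SENT = re.compile(r"Sending sasl message state: (\w+)")
RECEIVED = re.compile(r"Received SASL message state: (\w+)")
AUTH_OK = re.compile(r"Auth successful for (\S+)")

def sasl_trace(records):
    """records: iterable of (thread, message). Returns {thread: [(step, detail), ...]}."""
    steps = {}
    for thread, msg in records:
        for tag, pattern in (("sent", SENT), ("received", RECEIVED), ("auth-ok", AUTH_OK)):
            m = pattern.search(msg)
            if m:
                steps.setdefault(thread, []).append((tag, m.group(1)))
    return steps

def unfinished_clients(steps):
    """Client threads that sent INITIATE but never saw SUCCESS back."""
    return [t for t, s in steps.items()
            if ("sent", "INITIATE") in s and ("received", "SUCCESS") not in s]
```

For this excerpt unfinished_clients() should come back empty: every INITIATE sent by a ContainerLauncher thread is answered by a SUCCESS a few milliseconds later.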
2015-04-28 15:10:35,000 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:35,000 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:35,000 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:35,000 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@42af958a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:35,001 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,001 INFO [IPC Server handler 3 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000004 asked for a task 2015-04-28 15:10:35,001 INFO [IPC Server handler 3 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000004 given task: attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:35,001 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:35,002 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@42af958a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#0 Retry#0 2015-04-28 15:10:35,002 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@42af958a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:35,472 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #34 2015-04-28 15:10:35,476 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #34 2015-04-28 15:10:35,476 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 5ms 2015-04-28 15:10:35,476 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:35,476 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:35,612 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16338; # active connections: 3; # queued calls: 0 2015-04-28 15:10:35,744 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16339; # active connections: 4; # queued calls: 0 2015-04-28 15:10:35,766 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:35,766 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:35,767 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"cXZi1uYmyIJveEulpvVF6T7aaysruUlsy9Sg1TH+\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:35,767 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16338 Call#-33 Retry#-1 2015-04-28 15:10:35,767 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16338 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:35,780 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #125 2015-04-28 15:10:35,780 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#125 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:35,780 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,780 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:35,781 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#125 Retry#0 2015-04-28 15:10:35,781 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#125 Retry#0 Wrote 32 bytes. 
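Every allocate heartbeat above ends with the allocator noting that the reduce slow-start threshold is not met: 16 maps must complete before the single pending reduce is scheduled, and the scheduling snapshot still shows CompletedMaps:0. (The threshold itself is normally derived from mapreduce.job.reduce.slowstart.completedmaps and the job's total map count; this excerpt only shows the resulting number.) A small sketch, using only what the log prints, to report how far the job is from that threshold; the function name is mine.

```python
import re

THRESHOLD = re.compile(r"completedMapsForReduceSlowstart (\d+)")
COMPLETED = re.compile(r"CompletedMaps:(\d+)")

def reduce_slowstart_gap(messages):
    """How many more map completions the AM is waiting for before scheduling reduces."""
    threshold = completed = None
    for msg in messages:
        m = THRESHOLD.search(msg)
        if m:
            threshold = int(m.group(1))
        m = COMPLETED.search(msg)
        if m:
            completed = int(m.group(1))
    if threshold is None or completed is None:
        return None
    return max(0, threshold - completed)
```

For this excerpt the gap is 16: no maps have finished yet, so the pending reduce stays unscheduled.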
2015-04-28 15:10:35,782 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #126 2015-04-28 15:10:35,782 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#126 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:35,783 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,783 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:35,783 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#126 Retry#0 2015-04-28 15:10:35,783 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#126 Retry#0 Wrote 266 bytes. 2015-04-28 15:10:35,795 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #127 2015-04-28 15:10:35,795 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#127 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:35,795 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,796 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:35,796 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#127 Retry#0 2015-04-28 15:10:35,796 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#127 Retry#0 Wrote 266 bytes. 
2015-04-28 15:10:35,829 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:35,831 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@4c6873af), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:35,831 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,832 INFO [IPC Server handler 3 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000000_0 is : 0.0 2015-04-28 15:10:35,835 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 4 2015-04-28 15:10:35,835 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:35,835 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_UPDATE 2015-04-28 15:10:35,835 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@4c6873af), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#5 Retry#0 2015-04-28 15:10:35,835 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@4c6873af), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:35,839 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:35,868 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:35,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:35,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"CEubgfD3fGfAudCmzvcfmiaKd7LC9FzBEGyQD/Qu\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:35,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16339 Call#-33 Retry#-1 2015-04-28 15:10:35,869 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16339 Call#-33 Retry#-1 Wrote 178 bytes. 
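The statusUpdate umbilical calls above (progress 0.0 for attempt_1430213948957_0001_m_000000_0) carry whatever progress and status the task JVM reports back to the AM. On the task side that reporting is driven through the MapReduce task context; a hypothetical mapper that keeps its attempt's status fresh might look like the sketch below (class name and status text are made up).

    // Hypothetical mapper: Context.progress()/setStatus() feed the
    // statusUpdate() calls the TaskAttemptListener logs above.
    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    public class ProgressReportingMapper
        extends Mapper<LongWritable, Text, Text, LongWritable> {
      @Override
      protected void map(LongWritable key, Text value, Context context)
          throws IOException, InterruptedException {
        // ... expensive per-record work would go here ...
        context.setStatus("processing offset " + key.get()); // visible in the attempt status
        context.progress();                                   // keeps the attempt from timing out
        context.write(new Text("records"), new LongWritable(1));
      }
    }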
2015-04-28 15:10:35,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:35,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:35,945 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=5e263c56b0c958087f6acc40f5a7df7d" 2015-04-28 15:10:35,945 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16338 Call#-33 Retry#-1 2015-04-28 15:10:35,946 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16338 Call#-33 Retry#-1 Wrote 64 bytes. 
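The exchange just traced is a standard SASL DIGEST-MD5 handshake: the server sends a NEGOTIATE challenge with a realm and nonce, evaluates the client's response against a password it derives from the job token, and replies SUCCESS with an rspauth value. The sketch below reproduces only that challenge/response shape with the plain JDK javax.security.sasl API; the username, the "secret" password and the "mapred"/"default" protocol and server names are placeholders, not Hadoop's actual token plumbing or IPC framing.

    // Plain-JDK DIGEST-MD5 round trip, for illustration only.
    import java.util.Collections;
    import java.util.Map;
    import javax.security.auth.callback.*;
    import javax.security.sasl.*;

    public class DigestMd5Sketch {
      public static void main(String[] args) throws Exception {
        Map<String, String> props = Collections.singletonMap(Sasl.QOP, "auth");
        CallbackHandler serverCb = callbacks -> {
          for (Callback cb : callbacks) {
            if (cb instanceof NameCallback) ((NameCallback) cb).setName(((NameCallback) cb).getDefaultName());
            else if (cb instanceof PasswordCallback) ((PasswordCallback) cb).setPassword("secret".toCharArray());
            else if (cb instanceof RealmCallback) ((RealmCallback) cb).setText(((RealmCallback) cb).getDefaultText());
            else if (cb instanceof AuthorizeCallback) ((AuthorizeCallback) cb).setAuthorized(true);
          }
        };
        CallbackHandler clientCb = callbacks -> {
          for (Callback cb : callbacks) {
            if (cb instanceof NameCallback) ((NameCallback) cb).setName("job_1430213948957_0001");
            else if (cb instanceof PasswordCallback) ((PasswordCallback) cb).setPassword("secret".toCharArray());
            else if (cb instanceof RealmCallback) ((RealmCallback) cb).setText(((RealmCallback) cb).getDefaultText());
          }
        };
        SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "mapred", "default", props, serverCb);
        SaslClient client = Sasl.createSaslClient(new String[] {"DIGEST-MD5"}, null,
            "mapred", "default", props, clientCb);

        byte[] challenge = server.evaluateResponse(new byte[0]); // NEGOTIATE: realm, nonce, qop
        byte[] response  = client.evaluateChallenge(challenge);  // INITIATE: username, cnonce, digest
        byte[] rspauth   = server.evaluateResponse(response);    // SUCCESS: rspauth=... once verified
        client.evaluateChallenge(rspauth);                       // client checks the server's rspauth
        System.out.println("authenticated=" + server.isComplete()
            + " qop=" + server.getNegotiatedProperty(Sasl.QOP));
      }
    }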
2015-04-28 15:10:35,967 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:35,975 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@1754d819), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:35,975 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,975 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000000_0 is : 1.0 2015-04-28 15:10:35,984 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:35,985 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:35,985 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:35,985 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@4d462794), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:35,987 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,987 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 12 2015-04-28 15:10:35,987 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:35,988 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000003 asked for a task 2015-04-28 15:10:35,988 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_UPDATE 2015-04-28 15:10:35,988 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000003 given task: attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:35,988 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 2 procesingTime= 1 2015-04-28 15:10:35,988 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@1754d819), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#7 Retry#0 2015-04-28 15:10:35,988 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@4d462794), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#0 Retry#0 2015-04-28 15:10:35,988 DEBUG [IPC Server handler 0 on 21207] 
org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000000_0, org.apache.hadoop.mapred.MapTaskStatus@1754d819), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#7 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:35,988 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@4d462794), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#0 Retry#0 Wrote 365 bytes. 2015-04-28 15:10:35,988 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:35,989 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:35,989 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: done(attempt_1430213948957_0001_m_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:35,990 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:35,990 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:35,990 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:35,990 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:35,990 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#8 Retry#0 2015-04-28 15:10:35,990 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63550 Call#8 Retry#0 Wrote 118 bytes. 2015-04-28 15:10:35,997 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63550. 
Number of active connections: 3 2015-04-28 15:10:36,001 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_DONE 2015-04-28 15:10:36,002 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000000_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:36,002 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000002 taskAttempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:36,002 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000002 taskAttempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:36,003 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:36,003 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:36,003 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:36,004 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:36,004 DEBUG [ContainerLauncher #5] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:36,004 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:36,009 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
2015-04-28 15:10:36,009 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:36,012 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:36,012 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:36,015 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"KlOZixpyMH9nc6okjuzKIwCzXjgq6OInTWRUwOn5\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:36,015 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@536c29f3 2015-04-28 15:10:36,015 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:36,015 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:36,016 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:36,016 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:36,016 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:36,016 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:36,017 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"KlOZixpyMH9nc6okjuzKIwCzXjgq6OInTWRUwOn5\",nc=00000001,cnonce=\"CTnZJOIucq+j5lJznjYMdwz7+9X87WorfiT9p7e5\",digest-uri=\"/default\",maxbuf=65536,response=afc29756537d28add24bd096a6176cde,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:36,019 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=76529fc624a3aa917271ca6624696ecb" 2015-04-28 15:10:36,020 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:36,020 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:36,021 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) 
connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #35 2015-04-28 15:10:36,035 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:36,035 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:36,036 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:36,036 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:36,036 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:36,036 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:36,036 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:36,036 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:36,037 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=4d72f195ccc564e1978563d7c4f02298" 2015-04-28 15:10:36,037 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16339 Call#-33 Retry#-1 2015-04-28 15:10:36,037 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16339 Call#-33 Retry#-1 Wrote 64 bytes. 
2015-04-28 15:10:36,043 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #35 2015-04-28 15:10:36,043 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:36,043 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:36,052 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 44ms 2015-04-28 15:10:36,052 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:36,053 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:36,053 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:36,053 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@42a48045), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:36,056 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,056 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000005 asked for a task 2015-04-28 15:10:36,056 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000005 given task: attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:36,056 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 3 procesingTime= 0 2015-04-28 15:10:36,057 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@42a48045), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#0 Retry#0 2015-04-28 15:10:36,057 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@42a48045), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#0 Retry#0 Wrote 366 bytes. 
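The CONTAINER_REMOTE_CLEANUP processing above ends in a stopContainers RPC to the NodeManager (44 ms here), authenticated with the NMToken shown in the trace. Outside the AM's internal ContainerLauncherImpl, the same RPC is normally reached through the org.apache.hadoop.yarn.client.api.NMClient wrapper; a rough sketch follows, with the application, container and node values copied from this log and the NMToken assumed to already be in the client's token cache (in a real AM it is stored there when the container is allocated).

    // Rough NMClient-based equivalent of the container cleanup traced above.
    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;
    import org.apache.hadoop.yarn.api.records.NodeId;
    import org.apache.hadoop.yarn.client.api.NMClient;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class StopContainerSketch {
      public static void main(String[] args) throws Exception {
        NMClient nmClient = NMClient.createNMClient();
        nmClient.init(new YarnConfiguration());
        nmClient.start();
        ApplicationId appId = ApplicationId.newInstance(1430213948957L, 1);
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
        ContainerId containerId = ContainerId.newContainerId(attemptId, 2); // ..._01_000002
        NodeId nodeId = NodeId.newInstance("host-IP143", 64318);
        nmClient.stopContainer(containerId, nodeId); // issues the stopContainers RPC
        nmClient.stop();
      }
    }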
2015-04-28 15:10:36,059 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:36,059 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:36,061 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000000_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:36,061 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:36,061 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:36,061 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:36,061 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:36,061 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:36,061 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000000 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:36,070 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:36,070 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=18282 lastFlushOffset=0 createNewBlock=false 2015-04-28 15:10:36,070 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 0 2015-04-28 15:10:36,070 DEBUG [Thread-54] org.apache.hadoop.hdfs.DFSClient: Allocating new block 2015-04-28 15:10:36,071 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #36 2015-04-28 15:10:36,071 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 0 2015-04-28 15:10:36,073 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000000_0 2015-04-28 15:10:36,074 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000000 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:36,074 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:36,074 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: 
JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:36,074 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:36,077 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:36,077 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:36,077 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 1 2015-04-28 15:10:36,077 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:36,099 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #36 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: addBlock took 29ms 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK] 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.hdfs.DFSClient: Send buf size 131072 2015-04-28 15:10:36,099 DEBUG [Thread-54] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:36,118 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 0 offsetInBlock: 0 lastPacketInBlock: false lastByteOffsetInBlock: 18282 2015-04-28 15:10:36,133 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 0 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 14040667 flag: 0 flag: 0 2015-04-28 15:10:36,134 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #37 2015-04-28 15:10:36,157 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #37 2015-04-28 15:10:36,157 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: fsync took 23ms 2015-04-28 15:10:36,158 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:36,159 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:36,162 DEBUG 
[eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=1, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=17920 2015-04-28 15:10:36,162 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:36,162 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=20990 lastFlushOffset=18282 createNewBlock=false 2015-04-28 15:10:36,162 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 1 2015-04-28 15:10:36,162 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 1 2015-04-28 15:10:36,162 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 1 offsetInBlock: 17920 lastPacketInBlock: false lastByteOffsetInBlock: 20990 2015-04-28 15:10:36,165 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 1 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1616748 flag: 0 flag: 0 2015-04-28 15:10:36,166 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:36,476 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:11 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:1 CompletedReds:0 ContAlloc:5 ContRel:0 HostLocal:5 RackLocal:0 2015-04-28 15:10:36,477 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #38 2015-04-28 15:10:36,480 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #38 2015-04-28 15:10:36,480 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:36,481 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:36,481 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:36,760 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:36,760 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@13ca7f2f), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:36,760 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,761 INFO [IPC Server handler 1 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000002_0 is : 0.0 2015-04-28 15:10:36,761 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16340; # active connections: 4; # queued calls: 0 2015-04-28 15:10:36,761 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:36,761 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:36,762 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@13ca7f2f), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#5 Retry#0 2015-04-28 15:10:36,762 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_UPDATE 2015-04-28 15:10:36,762 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@13ca7f2f), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#5 Retry#0 Wrote 41 bytes. 
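The allocator lines above ("Reduce slow start threshold not met. completedMapsForReduceSlowstart 16") mean no reduce containers are requested until enough maps finish. The threshold is derived from mapreduce.job.reduce.slowstart.completedmaps (default 0.05), turned into an absolute map count of roughly ceil(fraction * totalMaps); 16 is whatever that works out to for this job. A job that wants to delay reduces further can raise the fraction, for example as in the sketch below (class name and the 0.8 value are illustrative).

    // Illustrative way to raise the reduce slow-start fraction for a job.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class SlowstartConfigSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Do not ask for reduce containers until ~80% of the maps have completed.
        conf.setFloat("mapreduce.job.reduce.slowstart.completedmaps", 0.8f);
        Job job = Job.getInstance(conf, "slowstart-example");
        // ... configure mapper/reducer/input/output as usual, then:
        // job.waitForCompletion(true);
      }
    }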
2015-04-28 15:10:36,762 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:36,804 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #128 2015-04-28 15:10:36,804 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#128 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:36,804 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,805 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 1 2015-04-28 15:10:36,805 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#128 Retry#0 2015-04-28 15:10:36,805 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#128 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:36,820 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #129 2015-04-28 15:10:36,820 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#129 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:36,820 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,821 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:36,822 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#129 Retry#0 2015-04-28 15:10:36,822 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#129 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:36,829 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #130 2015-04-28 15:10:36,829 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#130 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:36,830 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@4fb41a2b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#130 Retry#0 2015-04-28 15:10:36,830 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#130 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:36,831 INFO [IPC Server handler 3 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000002_0 is : 1.0 2015-04-28 15:10:36,832 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:36,832 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:36,832 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_UPDATE 2015-04-28 15:10:36,833 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:36,833 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@4fb41a2b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#7 Retry#0 2015-04-28 15:10:36,833 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000002_0, org.apache.hadoop.mapred.MapTaskStatus@4fb41a2b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:36,836 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:36,837 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: done(attempt_1430213948957_0001_m_000002_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:36,837 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:36,837 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:36,837 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 0 2015-04-28 15:10:36,837 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:36,838 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to done(attempt_1430213948957_0001_m_000002_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#8 Retry#0 2015-04-28 15:10:36,838 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to done(attempt_1430213948957_0001_m_000002_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63551 Call#8 Retry#0 Wrote 118 bytes. 2015-04-28 15:10:36,838 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_DONE 2015-04-28 15:10:36,839 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000002_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:36,839 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000004 taskAttempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:36,840 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63551. 
Number of active connections: 3 2015-04-28 15:10:36,843 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000004 taskAttempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:36,844 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:36,844 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:36,844 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:36,845 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:36,845 DEBUG [ContainerLauncher #6] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:36,845 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:36,845 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:36,845 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:36,846 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:36,846 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:36,851 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"DcnhaAS/ibCNBMsgXAUYOQ2pSm27ZPTRrEkm8TwR\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:36,851 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@30b08c77 2015-04-28 15:10:36,851 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:36,851 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:36,852 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:36,852 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:36,852 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:36,852 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:36,852 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"DcnhaAS/ibCNBMsgXAUYOQ2pSm27ZPTRrEkm8TwR\",nc=00000001,cnonce=\"SGgRabFo4pRHGoGZyqitTIq9Isx5HFY5f91lO2ki\",digest-uri=\"/default\",maxbuf=65536,response=0f6394506ce4b6618c62c3cbb863caef,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:36,856 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=37c5d8a51bc621baea04b2a89ba732d6" 2015-04-28 15:10:36,857 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:36,857 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #39 2015-04-28 15:10:36,857 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:36,862 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #39 2015-04-28 15:10:36,862 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:36,862 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:36,862 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 17ms 2015-04-28 15:10:36,862 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:36,862 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:36,862 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000002_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:36,862 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:36,862 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:36,863 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000002 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:36,863 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000002_0 2015-04-28 15:10:36,863 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000002 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:36,863 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 2 2015-04-28 15:10:36,863 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:36,868 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient 
writeChunk allocating new packet seqno=2, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=20480 2015-04-28 15:10:36,868 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:36,868 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=24126 lastFlushOffset=20990 createNewBlock=false 2015-04-28 15:10:36,868 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 2 2015-04-28 15:10:36,868 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 2 2015-04-28 15:10:36,868 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 2 offsetInBlock: 20480 lastPacketInBlock: false lastByteOffsetInBlock: 24126 2015-04-28 15:10:36,871 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 2 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1628922 flag: 0 flag: 0 2015-04-28 15:10:36,873 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:36,873 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:36,875 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=3, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=24064 2015-04-28 15:10:36,875 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:36,875 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=26835 lastFlushOffset=24126 createNewBlock=false 2015-04-28 15:10:36,875 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 3 2015-04-28 15:10:36,875 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 3 2015-04-28 15:10:36,875 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 3 offsetInBlock: 24064 lastPacketInBlock: false lastByteOffsetInBlock: 26835 2015-04-28 15:10:36,878 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 3 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1528651 flag: 0 flag: 0 2015-04-28 15:10:36,878 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 
15:10:36,916 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:36,917 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:36,917 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"eNPUy+i+1/wIHPtmYrj6jKzYeNYE+qTsIlouB1Vi\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:36,917 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16340 Call#-33 Retry#-1 2015-04-28 15:10:36,917 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16340 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:37,084 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:37,084 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:37,084 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:37,085 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=ede84e411d57fcb00a0a5d53186c07e7" 2015-04-28 15:10:37,085 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16340 Call#-33 Retry#-1 2015-04-28 15:10:37,086 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16340 Call#-33 Retry#-1 Wrote 64 bytes. 
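The exchange above is the AM's IPC server authenticating a task's umbilical connection with SASL DIGEST-MD5: it sends a NEGOTIATE challenge, evaluates the client's 270-byte token, and replies with a SUCCESS message carrying rspauth, after which the negotiated QoP is auth. The snippet below is a minimal loopback of that same challenge/response flow using only the JDK's javax.security.sasl API; the user name, shared secret and realm are made-up stand-ins, and this is a sketch of the mechanism, not Hadoop's SaslRpcServer/SaslRpcClient code.

    import java.util.Map;
    import javax.security.auth.callback.Callback;
    import javax.security.auth.callback.CallbackHandler;
    import javax.security.auth.callback.NameCallback;
    import javax.security.auth.callback.PasswordCallback;
    import javax.security.sasl.AuthorizeCallback;
    import javax.security.sasl.RealmCallback;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;
    import javax.security.sasl.SaslServer;

    // Loopback DIGEST-MD5 handshake with the JDK SASL provider: the same
    // challenge / response / rspauth sequence visible in the log above.
    public class DigestMd5Loopback {

        public static void main(String[] args) throws Exception {
            Map<String, String> props = Map.of(Sasl.QOP, "auth");

            // Server side: knows the client's secret and authorizes the authenticated id.
            CallbackHandler serverCbs = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof RealmCallback rc) rc.setText(rc.getDefaultText());
                    else if (cb instanceof NameCallback nc) nc.setName(nc.getDefaultName());
                    else if (cb instanceof PasswordCallback pc) pc.setPassword("sharedSecret".toCharArray());
                    else if (cb instanceof AuthorizeCallback ac)
                        ac.setAuthorized(ac.getAuthenticationID().equals(ac.getAuthorizationID()));
                }
            };

            // Client side: supplies the user name and the same shared secret.
            CallbackHandler clientCbs = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof RealmCallback rc) rc.setText(rc.getDefaultText());
                    else if (cb instanceof NameCallback nc) nc.setName("job_1430213948957_0001");
                    else if (cb instanceof PasswordCallback pc) pc.setPassword("sharedSecret".toCharArray());
                }
            };

            SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "", "default", props, serverCbs);
            SaslClient client = Sasl.createSaslClient(
                    new String[] {"DIGEST-MD5"}, null, "", "default", props, clientCbs);

            byte[] challenge = server.evaluateResponse(new byte[0]); // server-first: initial challenge (NEGOTIATE)
            byte[] response  = client.evaluateChallenge(challenge);  // client token (INITIATE)
            byte[] rspauth   = server.evaluateResponse(response);    // server SUCCESS token (rspauth=...)
            client.evaluateChallenge(rspauth);                       // client verifies rspauth

            System.out.println("authenticated=" + server.isComplete()
                    + " authorizationID=" + server.getAuthorizationID()
                    + " negotiated QoP=" + server.getNegotiatedProperty(Sasl.QOP));
        }
    }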
2015-04-28 15:10:37,147 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:37,148 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:37,148 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:37,148 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@35996ad9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:37,149 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:37,149 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000007 asked for a task 2015-04-28 15:10:37,149 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000007 given task: attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:37,149 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:37,151 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@35996ad9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#0 Retry#0 2015-04-28 15:10:37,151 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@35996ad9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#0 Retry#0 Wrote 366 bytes. 
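Here a newly started child JVM (jvm_1430213948957_0001_m_000007) calls getTask over TaskUmbilicalProtocol and is handed attempt_1430213948957_0001_m_000004_0. The toy registry below illustrates that launch-then-claim handshake under the assumption of a simple one-shot mapping from JVM id to attempt id; Hadoop's TaskAttemptListenerImpl layers heartbeats, pings and lost-JVM handling on top of this idea.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch of the "JVM asks for a task" handshake seen in the log:
    // the AM registers the attempt a launched JVM should run, the JVM claims it once.
    public class TaskRegistry {
        private final Map<String, String> launchedJvms = new ConcurrentHashMap<>();

        // Called after the container launch succeeds (TA_CONTAINER_LAUNCHED in the log).
        public void register(String jvmId, String attemptId) {
            launchedJvms.put(jvmId, attemptId);
        }

        // Called by the child JVM over the umbilical ("JVM with ID ... asked for a task").
        public String getTask(String jvmId) {
            return launchedJvms.remove(jvmId);   // null => unknown JVM or already served
        }

        public static void main(String[] args) {
            TaskRegistry r = new TaskRegistry();
            r.register("jvm_1430213948957_0001_m_000007", "attempt_1430213948957_0001_m_000004_0");
            System.out.println(r.getTask("jvm_1430213948957_0001_m_000007")); // the attempt
            System.out.println(r.getTask("jvm_1430213948957_0001_m_000007")); // null: already served
        }
    }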
2015-04-28 15:10:37,481 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:11 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:2 CompletedReds:0 ContAlloc:5 ContRel:0 HostLocal:5 RackLocal:0 2015-04-28 15:10:37,482 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #40 2015-04-28 15:10:37,488 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #40 2015-04-28 15:10:37,488 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:37,494 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000006, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:37,494 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000002 2015-04-28 15:10:37,495 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:37,495 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:37,495 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:37,496 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000006 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:37,496 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000000_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=11 #asks=0 2015-04-28 15:10:37,496 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000000_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:37,496 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=10 #asks=1 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=11 #asks=1 2015-04-28 15:10:37,496 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=10 #asks=2 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=11 #asks=2 2015-04-28 15:10:37,496 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=10 #asks=3 2015-04-28 15:10:37,496 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=11 #asks=3 2015-04-28 15:10:37,496 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=10 #asks=4 2015-04-28 15:10:37,496 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:37,496 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000006 to attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:37,496 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_ASSIGNED 2015-04-28 15:10:37,497 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000006, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000005_0 on node host-IP143:64318 2015-04-28 15:10:37,497 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:37,497 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:37,497 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:37,497 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:37,497 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:10 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:2 CompletedReds:0 ContAlloc:6 ContRel:0 HostLocal:6 RackLocal:0 2015-04-28 15:10:37,497 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:37,498 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000005_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:37,498 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:37,498 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:37,498 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000006 taskAttempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:37,498 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:37,499 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000006 taskAttempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:37,499 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:37,499 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:37,504 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:37,504 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:37,504 DEBUG [ContainerLauncher #7] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:37,504 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:37,505 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
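Each assignment in this log runs the same bookkeeping: the allocator matches the container's host against the request list, decrements the outstanding request count once per matching resource name (each candidate host, /default-rack, and *), and keeps reduces pending until completedMapsForReduceSlowstart (16 here) maps have finished. The sketch below is a simplified, hypothetical reconstruction of that table from the BEFORE/AFTER decResourceRequest lines, not RMContainerRequestor's actual implementation.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // One counter per (priority, resourceName), decremented at every matching
    // locality level when a container is assigned to a task attempt.
    public class RequestTable {
        private final Map<String, Integer> numContainers = new HashMap<>();
        private int asks = 0;   // entries changed since the last allocate() heartbeat

        private String key(int priority, String resourceName) {
            return priority + "/" + resourceName;
        }

        public void addRequest(int priority, List<String> hosts, String rack) {
            for (String host : hosts) bump(priority, host, +1);
            bump(priority, rack, +1);
            bump(priority, "*", +1);
        }

        public void assigned(int priority, List<String> hosts, String rack) {
            for (String host : hosts) bump(priority, host, -1);
            bump(priority, rack, -1);
            bump(priority, "*", -1);
        }

        public void heartbeatSent() { asks = 0; }   // the ask list is flushed on each allocate()

        private void bump(int priority, String resourceName, int delta) {
            int after = numContainers.getOrDefault(key(priority, resourceName), 0) + delta;
            numContainers.put(key(priority, resourceName), after);
            asks++;
            System.out.printf("priority=%d resourceName=%s numContainers=%d #asks=%d%n",
                    priority, resourceName, after, asks);
        }

        // Reduce slow-start: hold reduces until enough maps have completed.
        public static boolean scheduleReduces(int completedMaps, int completedMapsForReduceSlowstart) {
            return completedMaps >= completedMapsForReduceSlowstart;
        }

        public static void main(String[] args) {
            RequestTable t = new RequestTable();
            // Model one map whose split has replicas on the two hosts of /default-rack.
            t.addRequest(20, List.of("host-IP143", "host-IP117"), "/default-rack");
            t.heartbeatSent();
            t.assigned(20, List.of("host-IP143", "host-IP117"), "/default-rack"); // ends with #asks=4, as in the log
            System.out.println("schedule reduces? " + scheduleReduces(2, 16));    // false: 2 of 16 maps done
        }
    }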
2015-04-28 15:10:37,505 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:37,505 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:37,505 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:37,507 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"Hqj17ILLI/ABrdKv7zAmEM/keu5kofDxzRhqO35G\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:37,507 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@536f5e61 2015-04-28 15:10:37,510 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:37,510 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:37,510 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:37,510 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:37,510 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:37,510 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:37,511 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"Hqj17ILLI/ABrdKv7zAmEM/keu5kofDxzRhqO35G\",nc=00000001,cnonce=\"OJRIwm2iiGb3HLgEa+KpoyUBNSjD5jZu2dy/cMAw\",digest-uri=\"/default\",maxbuf=65536,response=6ff9e236374316582b90b4c828b6bdca,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:37,513 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=928de51162ba00f06f6d6b80ec3db20e" 2015-04-28 15:10:37,513 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:37,515 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #41 2015-04-28 15:10:37,515 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to 
host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:37,526 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #41 2015-04-28 15:10:37,527 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:37,527 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:37,527 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 23ms 2015-04-28 15:10:37,527 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000005_0 : 13562 2015-04-28 15:10:37,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:37,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:37,527 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000005_0] using containerId: [container_1430213948957_0001_01_000006 on NM: [host-IP143:64318] 2015-04-28 15:10:37,528 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000005_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:37,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000005 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:37,528 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000005 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:37,528 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:37,528 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=4, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=26624 2015-04-28 15:10:37,528 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:37,846 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #131 2015-04-28 15:10:37,846 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#131 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:37,846 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:37,846 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:37,847 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#131 Retry#0 2015-04-28 15:10:37,847 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#131 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:37,849 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #132 2015-04-28 15:10:37,849 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#132 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:37,849 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:37,849 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:37,849 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#132 Retry#0 2015-04-28 15:10:37,850 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#132 Retry#0 Wrote 267 bytes. 
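Records of the form "Served: getJobReport queueTime= 0 procesingTime= 0" recur throughout this file, so a small parser is a practical way to see where IPC time goes. The sketch below scans a saved copy of this syslog (the file name is a placeholder) and totals call counts, queue time and processing time per RPC method; the regex deliberately matches the log's own spelling "procesingTime".

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Aggregates the "Served: <method> queueTime= N procesingTime= M" records
    // emitted by org.apache.hadoop.ipc.Server at DEBUG level in this log.
    public class IpcServedStats {
        private static final Pattern SERVED =
                Pattern.compile("Served: (\\S+) queueTime= (\\d+) procesingTime= (\\d+)");

        public static void main(String[] args) throws IOException {
            Path log = Path.of(args.length > 0 ? args[0] : "syslog.txt"); // placeholder path
            Map<String, long[]> stats = new HashMap<>();                  // method -> {calls, queueMs, procMs}

            for (String line : Files.readAllLines(log)) {
                Matcher m = SERVED.matcher(line);
                while (m.find()) {                                        // several records may share one line
                    long[] s = stats.computeIfAbsent(m.group(1), k -> new long[3]);
                    s[0]++;
                    s[1] += Long.parseLong(m.group(2));
                    s[2] += Long.parseLong(m.group(3));
                }
            }
            stats.forEach((method, s) ->
                    System.out.printf("%-35s calls=%d queueMs=%d procMs=%d%n", method, s[0], s[1], s[2]));
        }
    }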
2015-04-28 15:10:37,853 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #133 2015-04-28 15:10:37,853 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#133 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:37,854 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:37,854 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:37,854 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#133 Retry#0 2015-04-28 15:10:37,854 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#133 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:38,496 DEBUG [IPC Server idle connection scanner for port 48332] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 48332: task running 2015-04-28 15:10:38,498 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #42 2015-04-28 15:10:38,516 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #42 2015-04-28 15:10:38,516 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 19ms 2015-04-28 15:10:38,516 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000009, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000004 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000009 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] 
org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=10 #asks=0 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000002_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=9 #asks=1 2015-04-28 15:10:38,517 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000002_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=10 #asks=1 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=9 #asks=2 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=10 #asks=2 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=9 #asks=3 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=10 #asks=3 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=9 #asks=4 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000009 to attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_ASSIGNED 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000009, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { 
kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000006_0 on node host-IP143:64318 2015-04-28 15:10:38,517 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:38,517 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:38,517 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:9 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:2 CompletedReds:0 ContAlloc:7 ContRel:0 HostLocal:7 RackLocal:0 2015-04-28 15:10:38,518 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:38,518 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000006_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:38,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:38,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:38,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000009 taskAttempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:38,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:38,529 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000009 taskAttempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:38,529 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:38,529 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:38,530 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:38,530 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 
15:10:38,530 DEBUG [ContainerLauncher #8] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:38,531 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:38,532 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:38,532 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:38,532 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:38,533 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:38,537 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"TpuAuOXMhCUy0U7DE40w4IIONpSkHMQcqFPZcJTo\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:38,537 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@2eb2d30f 2015-04-28 15:10:38,539 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:38,539 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:38,540 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:38,540 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:38,540 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:38,540 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:38,545 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"TpuAuOXMhCUy0U7DE40w4IIONpSkHMQcqFPZcJTo\",nc=00000001,cnonce=\"r7/i52TnbuuVmkjcquqEvf0aSf8CUmfV70h8505T\",digest-uri=\"/default\",maxbuf=65536,response=4c0dc088763b74fda661b697103c00c5,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:38,550 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=5cd744cbdd79a0bcb17bed3212be2e45" 2015-04-28 15:10:38,551 DEBUG [ContainerLauncher #8] 
org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:38,554 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #43 2015-04-28 15:10:38,554 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:38,562 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #43 2015-04-28 15:10:38,562 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 31ms 2015-04-28 15:10:38,562 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:38,562 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:38,562 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000006_0 : 13562 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:38,563 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000006_0] using containerId: [container_1430213948957_0001_01_000009 on NM: [host-IP143:64318] 2015-04-28 15:10:38,563 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000006_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:38,563 DEBUG 
[AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:38,563 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000006 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:38,563 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000006 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:38,563 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:38,564 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:38,750 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:38,750 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@745d075c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:38,750 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,751 INFO [IPC Server handler 1 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000001_0 is : 0.0 2015-04-28 15:10:38,751 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 0 2015-04-28 15:10:38,751 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:38,752 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_UPDATE 2015-04-28 15:10:38,752 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:38,752 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@745d075c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#5 Retry#0 2015-04-28 15:10:38,752 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@745d075c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#5 Retry#0 Wrote 41 bytes. 
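Almost every record here follows the same pattern: some thread posts an event ("Dispatching the event ... EventType: ...") and the single "AsyncDispatcher event handler" thread routes it to whatever handler is registered for that event type. The class below is a generic, JDK-only miniature of that dispatch loop, written for illustration; YARN's AsyncDispatcher adds queue draining on stop, error handling and metrics on top of the same idea.

    import java.util.Map;
    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.function.Consumer;

    // Minimal single-threaded event dispatcher: producers enqueue events from any
    // thread, one worker dequeues and routes each event by its class.
    public class MiniDispatcher {
        private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>();
        private final Map<Class<?>, Consumer<Object>> handlers = new ConcurrentHashMap<>();
        private final Thread worker = new Thread(this::loop, "AsyncDispatcher event handler");

        public <T> void register(Class<T> type, Consumer<T> handler) {
            handlers.put(type, e -> handler.accept(type.cast(e)));
        }

        public void dispatch(Object event) {        // called from any thread
            queue.add(event);
        }

        public void start() { worker.setDaemon(true); worker.start(); }

        private void loop() {
            try {
                while (true) {
                    Object event = queue.take();
                    System.out.println("Dispatching the event " + event.getClass().getName());
                    Consumer<Object> h = handlers.get(event.getClass());
                    if (h != null) h.accept(event);
                }
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt(); // stop the worker on interrupt
            }
        }

        record StatusUpdate(String attemptId, float progress) {}

        public static void main(String[] args) throws InterruptedException {
            MiniDispatcher d = new MiniDispatcher();
            d.register(StatusUpdate.class, e ->
                    System.out.println("Progress of " + e.attemptId() + " is : " + e.progress()));
            d.start();
            d.dispatch(new StatusUpdate("attempt_1430213948957_0001_m_000001_0", 1.0f));
            Thread.sleep(200);   // let the worker drain the queue before the demo exits
        }
    }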
2015-04-28 15:10:38,857 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #134 2015-04-28 15:10:38,878 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#134 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:38,878 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,879 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 1 2015-04-28 15:10:38,879 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#134 Retry#0 2015-04-28 15:10:38,879 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#134 Retry#0 Wrote 33 bytes. 2015-04-28 15:10:38,881 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #135 2015-04-28 15:10:38,881 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#135 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:38,881 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,881 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:38,882 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#135 Retry#0 2015-04-28 15:10:38,882 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#135 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:38,884 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #136 2015-04-28 15:10:38,884 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#136 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:38,884 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,884 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:38,884 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#136 Retry#0 2015-04-28 15:10:38,885 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#136 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:38,916 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:38,916 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@4381c04c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:38,916 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,917 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000001_0 is : 1.0 2015-04-28 15:10:38,918 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:38,918 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@4381c04c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#7 Retry#0 2015-04-28 15:10:38,918 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000001_0, org.apache.hadoop.mapred.MapTaskStatus@4381c04c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:38,918 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:38,919 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_UPDATE 2015-04-28 15:10:38,919 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:38,920 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:38,920 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: done(attempt_1430213948957_0001_m_000001_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:38,920 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,920 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:38,921 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 1 2015-04-28 15:10:38,921 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000001_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#8 Retry#0 2015-04-28 15:10:38,921 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000001_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16338 Call#8 Retry#0 Wrote 118 bytes. 
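With the done() acknowledgement above, attempt m_000001_0 begins the same exit sequence already seen for m_000002_0: TA_DONE moves it from RUNNING to SUCCESS_CONTAINER_CLEANUP, a CONTAINER_REMOTE_CLEANUP stops its container, and TA_CONTAINER_CLEANED completes the move to SUCCEEDED. The enum-driven transition table below reconstructs only the states and events named in this log; Hadoop's real TaskAttemptImpl state machine is far larger.

    import java.util.HashMap;
    import java.util.Map;

    // Simplified reconstruction of the TaskAttempt transitions named in this log.
    public class AttemptLifecycle {
        enum State { UNASSIGNED, ASSIGNED, RUNNING, SUCCESS_CONTAINER_CLEANUP, SUCCEEDED }
        enum Event { TA_ASSIGNED, TA_CONTAINER_LAUNCHED, TA_DONE, TA_CONTAINER_CLEANED }

        private static final Map<State, Map<Event, State>> TRANSITIONS = new HashMap<>();
        static {
            TRANSITIONS.put(State.UNASSIGNED, Map.of(Event.TA_ASSIGNED, State.ASSIGNED));
            TRANSITIONS.put(State.ASSIGNED, Map.of(Event.TA_CONTAINER_LAUNCHED, State.RUNNING));
            TRANSITIONS.put(State.RUNNING, Map.of(Event.TA_DONE, State.SUCCESS_CONTAINER_CLEANUP));
            TRANSITIONS.put(State.SUCCESS_CONTAINER_CLEANUP, Map.of(Event.TA_CONTAINER_CLEANED, State.SUCCEEDED));
        }

        private State state = State.UNASSIGNED;

        public State handle(Event event) {
            State next = TRANSITIONS.getOrDefault(state, Map.of()).get(event);
            if (next == null) {
                throw new IllegalStateException("Can't handle " + event + " in state " + state);
            }
            System.out.println("TaskAttempt Transitioned from " + state + " to " + next);
            state = next;
            return next;
        }

        public static void main(String[] args) {
            AttemptLifecycle attempt = new AttemptLifecycle();
            attempt.handle(Event.TA_ASSIGNED);
            attempt.handle(Event.TA_CONTAINER_LAUNCHED);
            attempt.handle(Event.TA_DONE);
            attempt.handle(Event.TA_CONTAINER_CLEANED);   // ends in SUCCEEDED, as in the log
        }
    }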
2015-04-28 15:10:38,921 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:38,921 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_DONE 2015-04-28 15:10:38,922 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000001_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:38,922 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000003 taskAttempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:38,922 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000003 taskAttempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:38,923 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:38,923 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:38,923 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:38,923 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:38,924 DEBUG [ContainerLauncher #9] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:38,924 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:38,924 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:38,924 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:38,925 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:38,926 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:38,927 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16338. 
Number of active connections: 2 2015-04-28 15:10:38,930 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"irRhwjDv67f3VJRhj6vEYyyRD0IWvl1oHcfnZRqN\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:38,930 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@2248b30c 2015-04-28 15:10:38,931 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:38,931 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:38,931 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:38,931 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:38,931 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:38,931 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:38,932 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"irRhwjDv67f3VJRhj6vEYyyRD0IWvl1oHcfnZRqN\",nc=00000001,cnonce=\"dOXv7SI6JQ6ke8Akb6SxNpW16VfhZs6bke+vqCsg\",digest-uri=\"/default\",maxbuf=65536,response=d3507fd1bb3a4be7cd73c27118cf32af,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:38,936 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=a36c062ecfae1dff0206f36c8c014960" 2015-04-28 15:10:38,936 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:38,939 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #44 2015-04-28 15:10:38,939 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:38,973 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #44 2015-04-28 15:10:38,973 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: 
stopContainers took 49ms 2015-04-28 15:10:38,973 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:38,973 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:38,973 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:38,973 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:38,974 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000001_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:38,974 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000001 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:38,974 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000001_0 2015-04-28 15:10:38,974 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000001 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: 
Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:38,974 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 3 2015-04-28 15:10:38,974 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:38,976 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:38,976 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=30862 lastFlushOffset=26835 createNewBlock=false 2015-04-28 15:10:38,976 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 4 2015-04-28 15:10:38,976 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 4 2015-04-28 15:10:38,983 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 4 offsetInBlock: 26624 lastPacketInBlock: false lastByteOffsetInBlock: 30862 2015-04-28 15:10:38,986 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:38,987 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@4c637661), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:38,987 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:38,988 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 4 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 3675752 flag: 0 flag: 0 2015-04-28 15:10:38,988 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:38,988 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:38,990 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=5, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=30720 2015-04-28 15:10:38,990 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:38,990 DEBUG [eventHandlingThread] 
org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=33571 lastFlushOffset=30862 createNewBlock=false 2015-04-28 15:10:38,990 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 5 2015-04-28 15:10:38,990 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 5 2015-04-28 15:10:38,990 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000003_0 is : 0.0 2015-04-28 15:10:38,991 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 4 2015-04-28 15:10:38,991 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@4c637661), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#5 Retry#0 2015-04-28 15:10:38,991 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@4c637661), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:38,991 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 5 offsetInBlock: 30720 lastPacketInBlock: false lastByteOffsetInBlock: 33571 2015-04-28 15:10:38,993 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:38,994 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 5 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1613492 flag: 0 flag: 0 2015-04-28 15:10:38,994 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:38,994 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_UPDATE 2015-04-28 15:10:38,994 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:39,011 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63563; # active connections: 3; # queued calls: 0 2015-04-28 15:10:39,135 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:39,135 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:39,136 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"29FpiKSh19Je3SrmnPoqxTHLhYCsewADG3uG6Smn\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths 
{ method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:39,136 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63563 Call#-33 Retry#-1 2015-04-28 15:10:39,136 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63563 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:39,164 DEBUG [IPC Server idle connection scanner for port 21207] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 21207: task running 2015-04-28 15:10:39,207 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:39,208 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@420d67cf), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,208 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,209 INFO [IPC Server handler 1 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000003_0 is : 1.0 2015-04-28 15:10:39,224 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 16 2015-04-28 15:10:39,224 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@420d67cf), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#7 Retry#0 2015-04-28 15:10:39,224 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000003_0, org.apache.hadoop.mapred.MapTaskStatus@420d67cf), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:39,224 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:39,225 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_UPDATE 2015-04-28 15:10:39,225 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:39,226 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:39,227 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: done(attempt_1430213948957_0001_m_000003_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,227 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,227 INFO [IPC Server handler 3 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:39,227 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:39,227 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:39,228 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_DONE 2015-04-28 15:10:39,228 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to done(attempt_1430213948957_0001_m_000003_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#8 Retry#0 2015-04-28 15:10:39,228 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: responding to done(attempt_1430213948957_0001_m_000003_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16339 Call#8 Retry#0 Wrote 118 bytes. 
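Interleaved with the task-attempt events, the eventHandlingThread keeps appending job-history records to the .jhist file and forcing them out: each "Flushing Job MetaInfo" entry is followed by a DFSClient flush that cuts the buffered bytes into a packet, queues it to the DataStreamer, and waits until the datanode pipeline acknowledges it ("Queued packet N ... Waiting for ack for: N ... reply: SUCCESS"). The sketch below shows that write-then-hflush pattern with the public FileSystem API; the path is a placeholder, not the job's staging-dir .jhist file.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.nio.charset.StandardCharsets;

// Minimal sketch of the write-then-hflush pattern behind the
// "Queued packet N / Waiting for ack for: N" entries above.
// The path is a placeholder, not the job's actual .jhist location.
public class HflushSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up core-site/hdfs-site on the classpath
        FileSystem fs = FileSystem.get(conf);
        Path history = new Path("/tmp/example-events.jhist");

        try (FSDataOutputStream out = fs.create(history, true)) {
            out.write("history event record...\n".getBytes(StandardCharsets.UTF_8));
            // hflush() pushes the current partial packet down the datanode
            // pipeline and blocks until it is acknowledged, which is what the
            // DFSClient "Waiting for ack" / ResponseProcessor SUCCESS lines record.
            out.hflush();
        }
    }
}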
2015-04-28 15:10:39,228 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000003_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:39,228 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000005 taskAttempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:39,228 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000005 taskAttempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:39,228 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:39,229 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:39,229 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,229 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:39,229 DEBUG [ContainerLauncher #0] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:39,230 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:39,230 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:39,230 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:39,231 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:39,231 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:39,231 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16339. 
Number of active connections: 2 2015-04-28 15:10:39,233 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"zZO8cMfVMFeHgM0OhybrNX+vJnFI5ARNzmqfOHmk\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:39,233 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@37f223cf 2015-04-28 15:10:39,233 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,233 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:39,234 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:39,234 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:39,234 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:39,234 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:39,234 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"zZO8cMfVMFeHgM0OhybrNX+vJnFI5ARNzmqfOHmk\",nc=00000001,cnonce=\"5vaonILbMgsyTioU2mOljfYZSSlG1kfli1lYiV+q\",digest-uri=\"/default\",maxbuf=65536,response=b8cac061a30d8f085f6c54bcf46b546d,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:39,238 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=6e291b77fde5b2a1775ae8e61a0733c7" 2015-04-28 15:10:39,238 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:39,244 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:39,244 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #45 2015-04-28 15:10:39,268 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #45 2015-04-28 15:10:39,268 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: 
stopContainers took 38ms 2015-04-28 15:10:39,268 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:39,268 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:39,269 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000003_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000003 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:39,269 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:39,269 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000003_0 2015-04-28 15:10:39,269 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000003 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:39,269 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: 
Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:39,270 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:39,270 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 4 2015-04-28 15:10:39,270 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:39,271 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=6, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=33280 2015-04-28 15:10:39,272 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:39,272 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=36730 lastFlushOffset=33571 createNewBlock=false 2015-04-28 15:10:39,272 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 6 2015-04-28 15:10:39,272 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 6 2015-04-28 15:10:39,272 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 6 offsetInBlock: 33280 lastPacketInBlock: false lastByteOffsetInBlock: 36730 2015-04-28 15:10:39,275 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:39,275 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:39,276 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:39,276 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=bab84cff0a669c6b4be86290d1d86349" 2015-04-28 15:10:39,277 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63563 Call#-33 Retry#-1 2015-04-28 15:10:39,277 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63563 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:39,279 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 6 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 5525667 flag: 0 flag: 0 2015-04-28 15:10:39,279 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:39,279 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:39,281 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=7, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=36352 2015-04-28 15:10:39,282 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:39,282 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=39439 lastFlushOffset=36730 createNewBlock=false 2015-04-28 15:10:39,282 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 7 2015-04-28 15:10:39,282 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 7 2015-04-28 15:10:39,282 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 7 offsetInBlock: 36352 lastPacketInBlock: false lastByteOffsetInBlock: 39439 2015-04-28 15:10:39,287 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 7 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 3664547 flag: 0 flag: 0 2015-04-28 15:10:39,287 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:39,292 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:39,292 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:39,292 DEBUG [Socket Reader #1 for port 
21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:39,293 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@2ae4f655), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,293 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,293 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000006 asked for a task 2015-04-28 15:10:39,293 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000006 given task: attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:39,293 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:39,296 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@2ae4f655), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#0 Retry#0 2015-04-28 15:10:39,296 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@2ae4f655), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:39,518 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:9 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:4 CompletedReds:0 ContAlloc:7 ContRel:0 HostLocal:7 RackLocal:0 2015-04-28 15:10:39,518 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #46 2015-04-28 15:10:39,525 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #46 2015-04-28 15:10:39,525 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000008, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000003 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container 
container_1430213948957_0001_01_000008 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=9 #asks=0 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=8 #asks=1 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=9 #asks=1 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=8 #asks=2 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=9 #asks=2 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=8 #asks=3 2015-04-28 15:10:39,526 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=9 #asks=3 2015-04-28 15:10:39,526 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=8 #asks=4 2015-04-28 15:10:39,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000008 to attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000001_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:39,527 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from 
attempt_1430213948957_0001_m_000001_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:39,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000008, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000007_0 on node host-IP117:64318 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:39,527 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_ASSIGNED 2015-04-28 15:10:39,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:39,527 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:39,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:39,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:8 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:4 CompletedReds:0 ContAlloc:8 ContRel:0 HostLocal:8 RackLocal:0 2015-04-28 15:10:39,527 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:39,528 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000007_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:39,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:39,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:39,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000008 taskAttempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:39,528 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:39,529 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000008 taskAttempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:39,529 
INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:39,529 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:39,530 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,530 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:39,530 DEBUG [ContainerLauncher #1] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:39,530 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:39,535 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:39,535 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:39,536 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:39,537 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:39,539 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"o4RISF8YR7uEvAXYKYhPdybaJEWof2Ogyf2xaI6k\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:39,539 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@6d832089 2015-04-28 15:10:39,539 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,539 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:39,540 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:39,540 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:39,540 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:39,540 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:39,540 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"o4RISF8YR7uEvAXYKYhPdybaJEWof2Ogyf2xaI6k\",nc=00000001,cnonce=\"D/93eYLYRjaLpIOtPYPvstmqhvAU4z5zczy7daBO\",digest-uri=\"/default\",maxbuf=65536,response=ce34fe02d5e496feca0382c9b3ed1ec4,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:39,545 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=0992a67f21e75337ffaaa965e675acea" 2015-04-28 15:10:39,546 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:39,563 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #47 2015-04-28 15:10:39,563 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:39,569 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #47 2015-04-28 15:10:39,569 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:39,569 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:39,569 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 34ms 2015-04-28 15:10:39,569 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for 
attempt_1430213948957_0001_m_000007_0 : 13562 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:39,570 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000007_0] using containerId: [container_1430213948957_0001_01_000008 on NM: [host-IP117:64318] 2015-04-28 15:10:39,570 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000007_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:39,570 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:39,570 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000007 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:39,570 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000007 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:39,570 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:39,686 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:39,686 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@47be4ce4), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,686 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,688 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000004_0 is : 0.0 2015-04-28 
15:10:39,688 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 1 2015-04-28 15:10:39,688 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@47be4ce4), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#5 Retry#0 2015-04-28 15:10:39,688 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@47be4ce4), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:39,688 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:39,689 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_UPDATE 2015-04-28 15:10:39,689 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:39,843 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:39,844 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@6985db72), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,844 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,845 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000004_0 is : 1.0 2015-04-28 15:10:39,846 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:39,846 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:39,846 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_UPDATE 2015-04-28 15:10:39,847 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@6985db72), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#7 Retry#0 2015-04-28 15:10:39,847 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:39,847 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: 
responding to statusUpdate(attempt_1430213948957_0001_m_000004_0, org.apache.hadoop.mapred.MapTaskStatus@6985db72), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#7 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:39,848 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:39,848 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: done(attempt_1430213948957_0001_m_000004_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:39,849 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,849 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:39,849 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:39,849 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:39,849 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_DONE 2015-04-28 15:10:39,855 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to done(attempt_1430213948957_0001_m_000004_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#8 Retry#0 2015-04-28 15:10:39,855 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to done(attempt_1430213948957_0001_m_000004_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16340 Call#8 Retry#0 Wrote 118 bytes. 2015-04-28 15:10:39,856 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000004_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:39,857 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000007 taskAttempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:39,857 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000007 taskAttempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:39,857 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:39,857 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:39,857 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16340. 
Number of active connections: 1 2015-04-28 15:10:39,858 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,858 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:39,858 DEBUG [ContainerLauncher #2] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:39,858 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:39,858 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:39,859 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:39,859 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:39,860 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:39,864 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"RgAMIvSGzhkzF8sfaVLzr4Xmg6qk7u6mkCMenwT/\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:39,864 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@3f913e30 2015-04-28 15:10:39,864 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:39,864 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:39,865 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:39,868 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:39,868 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:39,868 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:39,868 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"RgAMIvSGzhkzF8sfaVLzr4Xmg6qk7u6mkCMenwT/\",nc=00000001,cnonce=\"bIl5EmZ+jcLc7jshAXDAvZar1li4CdyQ6Nx3ii33\",digest-uri=\"/default\",maxbuf=65536,response=aaa020fc9b2d33d27ea0730a327a6767,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:39,872 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=14eadc2ea104d0029bd2953101607c68" 2015-04-28 15:10:39,873 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:39,881 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:39,885 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #48 2015-04-28 15:10:39,896 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #137 2015-04-28 15:10:39,896 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#137 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:39,896 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,897 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 1 2015-04-28 15:10:39,897 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#137 Retry#0 2015-04-28 15:10:39,897 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to 
org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#137 Retry#0 Wrote 166 bytes. 2015-04-28 15:10:39,900 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #48 2015-04-28 15:10:39,900 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:39,900 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:39,900 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 42ms 2015-04-28 15:10:39,900 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:39,900 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:39,901 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000004_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:39,901 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000004 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:39,901 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000004_0 2015-04-28 15:10:39,901 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000004 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 
15:10:39,901 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #138 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:39,901 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 5 2015-04-28 15:10:39,901 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:39,903 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#138 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:39,904 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,904 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 3 procesingTime= 0 2015-04-28 15:10:39,904 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#138 Retry#0 2015-04-28 15:10:39,904 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#138 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:39,907 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #139 2015-04-28 15:10:39,907 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#139 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:39,907 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:39,908 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:39,908 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#139 Retry#0 2015-04-28 15:10:39,908 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#139 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:39,910 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=8, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=39424 2015-04-28 15:10:39,910 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:39,910 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=43031 lastFlushOffset=39439 createNewBlock=false 2015-04-28 15:10:39,910 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 8 2015-04-28 15:10:39,910 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 8 2015-04-28 15:10:39,910 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 8 offsetInBlock: 39424 lastPacketInBlock: false lastByteOffsetInBlock: 43031 2015-04-28 15:10:39,914 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 8 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 2071250 flag: 0 flag: 0 2015-04-28 15:10:39,920 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:39,920 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:39,921 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=9, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=43008 2015-04-28 15:10:39,921 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history 
file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:39,921 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=45739 lastFlushOffset=43031 createNewBlock=false 2015-04-28 15:10:39,921 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 9 2015-04-28 15:10:39,921 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 9 2015-04-28 15:10:39,922 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 9 offsetInBlock: 43008 lastPacketInBlock: false lastByteOffsetInBlock: 45739 2015-04-28 15:10:39,924 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 9 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1540303 flag: 0 flag: 0 2015-04-28 15:10:39,924 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:40,480 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63567; # active connections: 2; # queued calls: 0 2015-04-28 15:10:40,527 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:8 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:5 CompletedReds:0 ContAlloc:8 ContRel:0 HostLocal:8 RackLocal:0 2015-04-28 15:10:40,528 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #49 2015-04-28 15:10:40,533 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #49 2015-04-28 15:10:40,533 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 5ms 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000011, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000005 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container 
container_1430213948957_0001_01_000011 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=8 #asks=0 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000003_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=7 #asks=1 2015-04-28 15:10:40,534 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000003_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=8 #asks=1 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=7 #asks=2 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=8 #asks=2 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=7 #asks=3 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=8 #asks=3 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=7 #asks=4 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000011 to attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing 
attempt_1430213948957_0001_m_000008_0 of type TA_ASSIGNED 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000011, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000008_0 on node host-IP117:64318 2015-04-28 15:10:40,534 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:40,534 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:40,534 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:40,535 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:40,535 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:7 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:5 CompletedReds:0 ContAlloc:9 ContRel:0 HostLocal:9 RackLocal:0 2015-04-28 15:10:40,535 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:40,535 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000008_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:40,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:40,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:40,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000011 taskAttempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:40,535 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:40,535 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000011 taskAttempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:40,536 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:40,536 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:40,536 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } 
nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:40,536 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:40,536 DEBUG [ContainerLauncher #3] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:40,537 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:40,538 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:40,538 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:40,539 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:40,539 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:40,541 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"xc8s5FZTDNfzIeQntOhMBFNFRf0Lx8LiyfmRs64H\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:40,541 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@3e569e63 2015-04-28 15:10:40,541 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:40,541 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:40,541 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:40,542 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:40,542 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:40,542 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:40,542 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"xc8s5FZTDNfzIeQntOhMBFNFRf0Lx8LiyfmRs64H\",nc=00000001,cnonce=\"IjiAFAwEhkFe5TmgK+3RIqpMK+qE2JAsia1rkrV+\",digest-uri=\"/default\",maxbuf=65536,response=c75b8271aab7b0a072097039fb281455,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:40,545 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=3962e5d3560b6d3284855fb6cedc041a" 2015-04-28 15:10:40,545 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:40,554 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #50 2015-04-28 15:10:40,554 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:40,564 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #50 2015-04-28 15:10:40,564 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 27ms 2015-04-28 15:10:40,564 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000008_0 : 13562 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:40,565 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 
15:10:40,565 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:40,565 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000008_0] using containerId: [container_1430213948957_0001_01_000011 on NM: [host-IP117:64318] 2015-04-28 15:10:40,565 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000008_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:40,565 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:40,565 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000008 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:40,565 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000008 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:40,566 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=10, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=45568 2015-04-28 15:10:40,566 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:40,615 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:40,616 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:40,616 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: 
"realm=\"default\",nonce=\"TZNcLuHk7SL5J/yDXbkTnbgnJ0ECBELkDoxC0kHe\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:40,616 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63567 Call#-33 Retry#-1 2015-04-28 15:10:40,616 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63567 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:40,716 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:40,716 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:40,716 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:40,716 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:40,717 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=ddb1a8e52104fe6515d4d6ee7abcb730" 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63567 Call#-33 Retry#-1 2015-04-28 15:10:40,717 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63567 Call#-33 Retry#-1 Wrote 64 bytes. 
2015-04-28 15:10:40,736 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:40,736 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:40,736 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:40,737 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@67422832), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:40,737 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:40,737 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000009 asked for a task 2015-04-28 15:10:40,737 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000009 given task: attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:40,737 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 0 procesingTime= 0 2015-04-28 15:10:40,738 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@67422832), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#0 Retry#0 2015-04-28 15:10:40,738 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@67422832), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:40,910 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #140 2015-04-28 15:10:40,910 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#140 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:40,911 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:40,911 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:40,911 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#140 Retry#0 2015-04-28 15:10:40,911 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#140 Retry#0 Wrote 99 bytes. 
2015-04-28 15:10:40,913 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #141 2015-04-28 15:10:40,913 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#141 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:40,913 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:40,914 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:40,914 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#141 Retry#0 2015-04-28 15:10:40,914 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#141 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:40,916 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #142 2015-04-28 15:10:40,916 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#142 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:40,916 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:40,916 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:40,917 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#142 Retry#0 2015-04-28 15:10:40,917 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#142 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:41,262 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16347; # active connections: 3; # queued calls: 0 2015-04-28 15:10:41,312 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:41,313 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@33f40b18), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:41,313 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,313 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000005_0 is : 0.0 2015-04-28 15:10:41,314 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:41,314 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:41,314 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@33f40b18), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#5 Retry#0 2015-04-28 15:10:41,314 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@33f40b18), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:41,319 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_UPDATE 2015-04-28 15:10:41,319 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:41,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:41,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:41,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"7gWk4wB9YvJXAfedQMwBUIu71jxg+dVYhUbV13kp\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:41,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16347 Call#-33 Retry#-1 2015-04-28 15:10:41,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16347 Call#-33 Retry#-1 Wrote 178 bytes. 
2015-04-28 15:10:41,392 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:41,392 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@637db002), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:41,392 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,393 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000005_0 is : 1.0 2015-04-28 15:10:41,395 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 3 2015-04-28 15:10:41,395 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:41,396 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@637db002), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#7 Retry#0 2015-04-28 15:10:41,396 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_UPDATE 2015-04-28 15:10:41,396 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000005_0, org.apache.hadoop.mapred.MapTaskStatus@637db002), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:41,396 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:41,397 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:41,398 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: done(attempt_1430213948957_0001_m_000005_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:41,398 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,398 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:41,398 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:41,398 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:41,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_DONE 2015-04-28 15:10:41,399 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to done(attempt_1430213948957_0001_m_000005_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#8 Retry#0 2015-04-28 15:10:41,399 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000005_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:41,399 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to done(attempt_1430213948957_0001_m_000005_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63563 Call#8 Retry#0 Wrote 118 bytes. 
2015-04-28 15:10:41,399 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000006 taskAttempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:41,400 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000006 taskAttempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:41,400 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:41,400 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:41,400 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:41,401 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:41,401 DEBUG [ContainerLauncher #4] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:41,401 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63563. Number of active connections: 2 2015-04-28 15:10:41,401 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:41,402 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:41,402 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:41,402 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:41,403 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:41,406 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"J050+LPJrj7LMC5QaSdG7Mhb+Fjh7BPjYti89g+c\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:41,406 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@20a7e5d7 2015-04-28 15:10:41,406 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:41,406 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:41,407 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:41,407 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:41,407 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:41,407 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:41,407 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"J050+LPJrj7LMC5QaSdG7Mhb+Fjh7BPjYti89g+c\",nc=00000001,cnonce=\"OWdCKg0c9mwCiPWfjVEbbdGw7v/Nf//wkG5CGiH0\",digest-uri=\"/default\",maxbuf=65536,response=7648a772ca906d5c7ea00aabd271771a,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:41,410 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=59f12e650e2db1282486952a706f52ee" 2015-04-28 15:10:41,410 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:41,426 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:41,428 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #51 2015-04-28 15:10:41,433 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #51 2015-04-28 15:10:41,433 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:41,433 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:41,434 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 33ms 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:41,435 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000005_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000005 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:41,435 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000005_0 2015-04-28 15:10:41,435 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000005 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:41,435 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:41,436 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:41,436 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:41,436 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 6 2015-04-28 15:10:41,436 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:41,436 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:41,437 DEBUG [eventHandlingThread] 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:41,437 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=49320 lastFlushOffset=45739 createNewBlock=false 2015-04-28 15:10:41,437 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 10 2015-04-28 15:10:41,437 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 10 2015-04-28 15:10:41,438 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 10 offsetInBlock: 45568 lastPacketInBlock: false lastByteOffsetInBlock: 49320 2015-04-28 15:10:41,441 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 10 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1581469 flag: 0 flag: 0 2015-04-28 15:10:41,441 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:41,441 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:41,442 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=11, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=49152 2015-04-28 15:10:41,442 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:41,442 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=52028 lastFlushOffset=49320 createNewBlock=false 2015-04-28 15:10:41,442 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 11 2015-04-28 15:10:41,442 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 11 2015-04-28 15:10:41,442 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 11 offsetInBlock: 49152 lastPacketInBlock: false lastByteOffsetInBlock: 52028 2015-04-28 15:10:41,449 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 11 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1620015 flag: 0 flag: 0 2015-04-28 15:10:41,452 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by 
saslServer.evaluateResponse() 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:41,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:41,497 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:41,498 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=e73dab7af9e3e2ec49ab97f91e66f73f" 2015-04-28 15:10:41,498 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16347 Call#-33 Retry#-1 2015-04-28 15:10:41,498 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16347 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:41,519 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:41,519 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:41,519 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:41,519 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@3b5e921a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:41,520 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,520 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000008 asked for a task 2015-04-28 15:10:41,520 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000008 given task: attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:41,520 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:41,521 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@3b5e921a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#0 Retry#0 2015-04-28 15:10:41,521 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@3b5e921a), rpc 
version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:41,535 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:7 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:6 CompletedReds:0 ContAlloc:9 ContRel:0 HostLocal:9 RackLocal:0 2015-04-28 15:10:41,535 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #52 2015-04-28 15:10:41,542 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #52 2015-04-28 15:10:41,542 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:41,542 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000012, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000007 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000012 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=7 #asks=0 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=6 #asks=1 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=7 #asks=1 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=6 #asks=2 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=7 #asks=2 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=6 #asks=3 2015-04-28 15:10:41,543 DEBUG 
[RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=7 #asks=3 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=6 #asks=4 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000012 to attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000012, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000009_0 on node host-IP117:64318 2015-04-28 15:10:41,543 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:41,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:6 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:6 CompletedReds:0 ContAlloc:10 ContRel:0 HostLocal:10 RackLocal:0 2015-04-28 15:10:41,543 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:41,543 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:41,543 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:41,543 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000004_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:41,543 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000004_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:41,543 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:41,544 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_ASSIGNED 2015-04-28 15:10:41,544 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:41,544 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:41,545 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000009_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:41,545 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:41,545 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:41,545 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000012 taskAttempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:41,545 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:41,546 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000012 taskAttempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:41,546 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:41,546 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:41,546 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:41,546 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:41,547 DEBUG [ContainerLauncher #5] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:41,547 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:41,549 DEBUG [ContainerLauncher #5] 
org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:41,549 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:41,550 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:41,550 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:41,551 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"N0xGDrz26JkCp7b2i47xeC+SOKM8O5EKZh5JBf4i\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:41,551 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@ec93afa 2015-04-28 15:10:41,552 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:41,552 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:41,552 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:41,552 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:41,552 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:41,552 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:41,553 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"N0xGDrz26JkCp7b2i47xeC+SOKM8O5EKZh5JBf4i\",nc=00000001,cnonce=\"XH8bRi3AuiB9INfqRtB7/8WjrTQEqnEniRG6BTj5\",digest-uri=\"/default\",maxbuf=65536,response=d3eccd601567f867ead354b5800750ed,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:41,556 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=35146eb69593d1f6850d834db4e943a5" 2015-04-28 15:10:41,556 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:41,556 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:41,559 DEBUG [IPC Parameter Sending Thread 
#0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #53 2015-04-28 15:10:41,565 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #53 2015-04-28 15:10:41,566 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 17ms 2015-04-28 15:10:41,566 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:41,566 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:41,566 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000009_0 : 13562 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:41,566 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000009_0] using containerId: [container_1430213948957_0001_01_000012 on NM: [host-IP117:64318] 2015-04-28 15:10:41,566 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000009_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:41,566 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:41,566 DEBUG [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000009 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:41,566 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000009 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:41,567 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=12, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=51712 2015-04-28 15:10:41,567 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:41,920 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #143 2015-04-28 15:10:41,921 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#143 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:41,921 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,921 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:41,921 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#143 Retry#0 2015-04-28 15:10:41,921 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#143 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:41,923 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #144 2015-04-28 15:10:41,923 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#144 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:41,924 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,924 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:41,924 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#144 Retry#0 2015-04-28 15:10:41,924 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#144 Retry#0 Wrote 267 bytes. 
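[Editor's note] The Call#143–#145 entries above are the submitting client (dsperf, IP117:43359) polling the AM over MRClientProtocol: one getTaskAttemptCompletionEvents followed by two getJobReport calls. From the client side this is just the ordinary job-monitoring loop; the sketch below is hedged (class name and polling interval are illustrative, only the org.apache.hadoop.mapreduce.Job calls are real), and each poll is what shows up here as a getJobReport RPC.

```java
// Hedged client-side sketch of the polling that produces the getJobReport
// entries above; not taken from this job's submit code.
import org.apache.hadoop.mapreduce.Job;

public class JobWatcher {
    public static void watch(Job job) throws Exception {
        while (!job.isComplete()) {                              // -> getJobReport
            System.out.printf("map %3.0f%%  reduce %3.0f%%%n",
                    job.mapProgress() * 100, job.reduceProgress() * 100);
            Thread.sleep(1000);                                  // poll roughly once a second
        }
        System.out.println("final state: " + job.getJobState()); // one last getJobReport
    }
}
```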
2015-04-28 15:10:41,926 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #145 2015-04-28 15:10:41,926 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#145 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:41,926 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:41,926 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:41,926 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#145 Retry#0 2015-04-28 15:10:41,927 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#145 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:42,242 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:42,243 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@5b3bb7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:42,243 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,243 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000006_0 is : 0.0 2015-04-28 15:10:42,244 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:42,244 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:42,244 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@5b3bb7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#5 Retry#0 2015-04-28 15:10:42,245 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@5b3bb7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#5 Retry#0 Wrote 41 bytes. 
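[Editor's note] The statusUpdate(attempt_..., MapTaskStatus...) call and the "Progress of TaskAttempt ... is : 0.0" line above come from the child JVM's TaskUmbilicalProtocol heartbeat back to the AM. In user code the reported figures originate from the task context; the mapper below is a small illustration of that (it is not this job's mapper), showing the calls that feed the task-side status the child ships over the umbilical.

```java
// Illustrative mapper only. setStatus()/progress() update the task-side status
// that the child JVM reports to the AM via TaskUmbilicalProtocol.statusUpdate().
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class ProgressyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // ... real record processing would go here ...
        context.setStatus("processing offset " + key.get());  // free-form status string
        context.progress();                                    // progress/keepalive tick
        context.write(value, new LongWritable(1));
    }
}
```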
2015-04-28 15:10:42,245 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_UPDATE 2015-04-28 15:10:42,245 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:42,306 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:42,306 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@698a8ee), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:42,306 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,307 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000006_0 is : 1.0 2015-04-28 15:10:42,308 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:42,308 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:42,308 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@698a8ee), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#7 Retry#0 2015-04-28 15:10:42,308 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000006_0, org.apache.hadoop.mapred.MapTaskStatus@698a8ee), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#7 Retry#0 Wrote 41 bytes. 
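[Editor's note] Each TA_UPDATE above is also forwarded to the speculator as an ATTEMPT_STATUS_UPDATE, which is how the AM decides whether a straggling attempt deserves a backup. Whether speculation runs at all is governed by the standard switches; the configuration sketch below is illustrative (the values are not read from this job).

```java
// Illustrative speculation settings; property names are the standard MRv2 keys,
// the chosen values are examples only.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class SpeculationConfig {
    public static Job newJob() throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean("mapreduce.map.speculative", true);      // allow backup map attempts
        conf.setBoolean("mapreduce.reduce.speculative", false);  // keep reducers single-attempt
        return Job.getInstance(conf, "speculation-demo");
    }
}
```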
2015-04-28 15:10:42,309 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_UPDATE 2015-04-28 15:10:42,310 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:42,310 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:42,310 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: done(attempt_1430213948957_0001_m_000006_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:42,310 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,311 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:42,311 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:42,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:42,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_DONE 2015-04-28 15:10:42,311 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000006_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#8 Retry#0 2015-04-28 15:10:42,311 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000006_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:42,311 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to done(attempt_1430213948957_0001_m_000006_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63567 Call#8 Retry#0 Wrote 118 bytes. 
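[Editor's note] Once done() is acknowledged, the attempt moves to SUCCESS_CONTAINER_CLEANUP and the AM asks the NodeManager to stop the now-idle container, which is why the entries below log "KILLING attempt_..." and why the diagnostics reports elsewhere in this log say "Exit code is 143" even though the map succeeded: 143 is 128 + 15 (SIGTERM), the conventional status of a process stopped by a signal, so these are expected cleanup kills rather than failures. A tiny decoder for that convention:

```java
// Decodes the "exit code > 128 means killed by signal (code - 128)" convention
// behind the "Exit code is 143" diagnostics in this log.
public class ExitCode {
    public static String describe(int exitCode) {
        if (exitCode > 128) {
            int signal = exitCode - 128;                 // 143 - 128 = 15 (SIGTERM)
            return "killed by signal " + signal;
        }
        return "exited normally with status " + exitCode;
    }

    public static void main(String[] args) {
        System.out.println(143 + ": " + describe(143));  // prints: killed by signal 15
    }
}
```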
2015-04-28 15:10:42,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000009 taskAttempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:42,311 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000009 taskAttempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:42,312 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:42,312 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:42,312 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:42,312 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:42,312 DEBUG [ContainerLauncher #6] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:42,313 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:42,313 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:42,313 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:42,313 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63567. Number of active connections: 1 2015-04-28 15:10:42,314 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:42,314 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:42,316 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"t7ZkXsNYBh1R0wvKkbgGhbKEBF0HyY7jowsFSAk+\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:42,316 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@5370dde8 2015-04-28 15:10:42,316 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:42,316 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:42,317 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:42,317 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:42,317 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:42,317 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:42,317 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"t7ZkXsNYBh1R0wvKkbgGhbKEBF0HyY7jowsFSAk+\",nc=00000001,cnonce=\"1/FcCphmPnYasCskcgCBdZhOWFE2vPHZe2b28Ace\",digest-uri=\"/default\",maxbuf=65536,response=5e2ee2676776f7f2a1a81af992efcc37,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:42,320 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=06bce99084b7e0a499af4d374bdbbe33" 2015-04-28 15:10:42,320 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:42,321 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:42,321 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #54 2015-04-28 15:10:42,326 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #54 2015-04-28 15:10:42,326 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 13ms 2015-04-28 15:10:42,326 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:42,326 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:42,326 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:42,326 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:42,326 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000006_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:42,326 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:42,326 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:42,326 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000006 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:42,327 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000006_0 2015-04-28 15:10:42,327 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000006 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:42,327 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 7 2015-04-28 15:10:42,327 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:42,327 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:42,329 DEBUG [eventHandlingThread] 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:42,329 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=55597 lastFlushOffset=52028 createNewBlock=false 2015-04-28 15:10:42,329 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 12 2015-04-28 15:10:42,329 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 12 2015-04-28 15:10:42,329 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 12 offsetInBlock: 51712 lastPacketInBlock: false lastByteOffsetInBlock: 55597 2015-04-28 15:10:42,334 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 12 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1616114 flag: 0 flag: 0 2015-04-28 15:10:42,334 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:42,334 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:42,335 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=13, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=55296 2015-04-28 15:10:42,335 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:42,337 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=58305 lastFlushOffset=55597 createNewBlock=false 2015-04-28 15:10:42,337 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 13 2015-04-28 15:10:42,337 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 13 2015-04-28 15:10:42,338 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 13 offsetInBlock: 55296 lastPacketInBlock: false lastByteOffsetInBlock: 58305 2015-04-28 15:10:42,343 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 13 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 4972162 flag: 0 flag: 0 2015-04-28 15:10:42,344 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:42,543 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:6 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:7 CompletedReds:0 
ContAlloc:10 ContRel:0 HostLocal:10 RackLocal:0 2015-04-28 15:10:42,544 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #55 2015-04-28 15:10:42,550 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #55 2015-04-28 15:10:42,550 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 6ms 2015-04-28 15:10:42,550 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000010, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000006 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000010 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=6 #asks=0 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=5 #asks=1 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] 
org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=6 #asks=1 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000005_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=5 #asks=2 2015-04-28 15:10:42,551 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000005_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=6 #asks=2 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=5 #asks=3 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=6 #asks=3 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=5 #asks=4 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000010 to attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000010, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000010_0 on node host-IP143:64318 2015-04-28 15:10:42,551 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_ASSIGNED 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:42,551 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:42,551 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:5 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:7 CompletedReds:0 ContAlloc:11 ContRel:0 HostLocal:11 RackLocal:0 2015-04-28 15:10:42,552 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:42,552 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000010_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:42,552 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:42,552 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:42,553 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000010 taskAttempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:42,553 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:42,553 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000010 taskAttempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:42,553 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:42,553 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:42,553 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:42,554 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:42,554 DEBUG [ContainerLauncher #7] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:42,554 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:42,555 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
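
The BEFORE/AFTER decResourceRequest pairs above show the AM's request bookkeeping: when a host-local container is assigned, the outstanding ask is decremented at every level of the original request (each host that holds the split, here host-IP143 and host-IP117, then the rack /default-rack, then the wildcard *), and each changed request is queued for the next allocate() heartbeat, which is why #asks climbs 0 through 4. The sketch below is a simplified stand-in for that bookkeeping, not Hadoop's RMContainerRequestor (which also keys requests by priority and capability).

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

// Illustrative only: simplified request bookkeeping behind the decResourceRequest trace.
public class DecResourceRequestSketch {
    private final Map<String, Integer> pendingByName = new HashMap<>(); // resourceName -> numContainers
    private final Set<String> asks = new LinkedHashSet<>();             // changed requests to resend

    void addContainerRequest(String[] hosts, String rack) {
        for (String host : hosts) pendingByName.merge(host, 1, Integer::sum);
        pendingByName.merge(rack, 1, Integer::sum);
        pendingByName.merge("*", 1, Integer::sum);
    }

    void containerAssigned(String[] hostsAskedFor, String rack) {
        String[] names = new String[hostsAskedFor.length + 2];
        System.arraycopy(hostsAskedFor, 0, names, 0, hostsAskedFor.length);
        names[names.length - 2] = rack;
        names[names.length - 1] = "*";
        for (String name : names) {
            int numContainers = pendingByName.merge(name, -1, Integer::sum);
            asks.add(name); // resent to the RM on the next allocate() heartbeat
            System.out.printf("AFTER decResourceRequest: resourceName=%s numContainers=%d #asks=%d%n",
                    name, numContainers, asks.size());
        }
    }

    public static void main(String[] args) {
        DecResourceRequestSketch requestor = new DecResourceRequestSketch();
        String[] hosts = {"host-IP143", "host-IP117"};        // hosts holding the split's replicas
        for (int i = 0; i < 6; i++) requestor.addContainerRequest(hosts, "/default-rack");
        requestor.containerAssigned(hosts, "/default-rack");  // numContainers drops 6 -> 5, #asks 1..4
    }
}
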
2015-04-28 15:10:42,555 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:42,556 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:42,556 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:42,558 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"bhpRNzkH1PiNWnJcxvKytVqHms2crTOn69YLUfYW\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:42,558 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@128784a4 2015-04-28 15:10:42,558 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:42,559 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:42,559 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:42,559 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:42,559 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:42,559 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:42,560 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"bhpRNzkH1PiNWnJcxvKytVqHms2crTOn69YLUfYW\",nc=00000001,cnonce=\"lEvL1x+K9MkP+8jaxU+IsZ0XJvninNw67eiFMKrx\",digest-uri=\"/default\",maxbuf=65536,response=faa691fcecc474834b53058a2b8e183f,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:42,569 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=12141df19573a5081ac8f8f6f37a94a1" 2015-04-28 15:10:42,570 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:42,570 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #56 2015-04-28 15:10:42,573 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to 
host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:42,576 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #56 2015-04-28 15:10:42,576 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 21ms 2015-04-28 15:10:42,576 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000010_0 : 13562 2015-04-28 15:10:42,576 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:42,576 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:42,576 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:42,576 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:42,576 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000010_0] using containerId: [container_1430213948957_0001_01_000010 on NM: [host-IP143:64318] 2015-04-28 15:10:42,577 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000010_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:42,577 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000010 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:42,577 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000010 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:42,577 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:42,578 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=14, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=57856 2015-04-28 15:10:42,578 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:42,929 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #146 2015-04-28 15:10:42,929 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#146 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:42,929 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,930 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 1 2015-04-28 15:10:42,930 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#146 Retry#0 2015-04-28 15:10:42,930 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#146 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:42,932 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #147 2015-04-28 15:10:42,932 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#147 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:42,932 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,932 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:42,933 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#147 Retry#0 2015-04-28 15:10:42,933 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#147 Retry#0 Wrote 267 bytes. 
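
The "TaskAttempt Transitioned from ... to ..." lines in this stretch come from the AM's per-attempt state machine: UNASSIGNED to ASSIGNED when the RM hands back a container, ASSIGNED to RUNNING once startContainers returns a shuffle port, and (later in this log) RUNNING to SUCCESS_CONTAINER_CLEANUP to SUCCEEDED after the task reports done(). The enum below lists only the states actually observed here, paired with the TA_* events that drive them; it is an illustration, not the full TaskAttemptImpl state machine.

// Only the TaskAttempt states visible in this log, in the order they occur.
public class TaskAttemptStatesSketch {
    enum State { UNASSIGNED, ASSIGNED, RUNNING, SUCCESS_CONTAINER_CLEANUP, SUCCEEDED }

    public static void main(String[] args) {
        String[] causes = {
                "TA_ASSIGNED (container allocated by the RM)",
                "TA_CONTAINER_LAUNCHED (startContainers returned a shuffle port)",
                "TA_DONE (task reported done() over the umbilical)",
                "TA_CONTAINER_CLEANED (stopContainers acknowledged by the NM)"
        };
        State[] states = State.values();
        for (int i = 0; i + 1 < states.length; i++) {
            System.out.println("TaskAttempt Transitioned from " + states[i]
                    + " to " + states[i + 1] + "  // " + causes[i]);
        }
    }
}
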
2015-04-28 15:10:42,941 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #148 2015-04-28 15:10:42,941 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#148 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:42,941 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:42,941 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:42,941 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#148 Retry#0 2015-04-28 15:10:42,941 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#148 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:43,084 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16348; # active connections: 2; # queued calls: 0 2015-04-28 15:10:43,210 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:43,210 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:43,210 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"8U6ThTkxGAB40Ji21spxyDnqaoOfrsmtlQRJSB3x\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:43,210 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16348 Call#-33 Retry#-1 2015-04-28 15:10:43,210 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16348 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:43,405 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:43,406 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=916be5fee9c22c5294caa78b3294117a" 2015-04-28 15:10:43,406 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16348 Call#-33 Retry#-1 2015-04-28 15:10:43,407 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16348 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:43,459 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:43,460 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:43,460 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:43,460 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@3f6101a9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:43,460 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:43,461 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000011 asked for a task 2015-04-28 15:10:43,461 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000011 given task: attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:43,461 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:43,461 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@3f6101a9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#0 Retry#0 2015-04-28 15:10:43,461 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@3f6101a9), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#0 Retry#0 Wrote 366 bytes. 
2015-04-28 15:10:43,552 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #57 2015-04-28 15:10:43,559 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #57 2015-04-28 15:10:43,559 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=1 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000014, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000009 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000014 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=5 #asks=0 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=4 #asks=1 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=5 #asks=1 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=4 #asks=2 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000006_0 of type TA_DIAGNOSTICS_UPDATE 
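
The "Diagnostics report ... Exit code is 143" messages in this part of the log (for attempt m_000005_0 above and m_000006_0 just below) are expected housekeeping rather than failures: once an attempt succeeds, the AM asks the NodeManager to stop its container, the child JVM receives SIGTERM, and the exit status is reported using the 128 + signal-number convention. The snippet below only illustrates that arithmetic.

// Illustration of the 128 + signal convention behind "Exit code is 143".
public class ExitCode143 {
    public static void main(String[] args) {
        int sigterm = 15; // the NodeManager stops the container with SIGTERM
        System.out.println("Container killed on request. Exit code is " + (128 + sigterm)); // 143
    }
}
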
2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=5 #asks=2 2015-04-28 15:10:43,560 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000006_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=4 #asks=3 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=5 #asks=3 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=4 #asks=4 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000014 to attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_ASSIGNED 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000014, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000011_0 on node host-IP143:64318 2015-04-28 15:10:43,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:43,560 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:43,560 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:43,561 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:43,561 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:4 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:7 CompletedReds:0 ContAlloc:12 ContRel:0 HostLocal:12 RackLocal:0 2015-04-28 15:10:43,561 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:43,561 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000011_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:43,561 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:43,561 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:43,561 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000014 taskAttempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:43,561 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:43,561 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000014 taskAttempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:43,561 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:43,561 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:43,562 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:43,562 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:43,562 DEBUG [ContainerLauncher #8] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:43,562 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:43,563 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
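
"Reduce slow start threshold not met. completedMapsForReduceSlowstart 16" repeats after every allocate() heartbeat here because only 7 maps have completed, so the single pending reduce is not yet scheduled. The threshold is derived from the slow-start fraction (mapreduce.job.reduce.slowstart.completedmaps) times the job's total map count. The sketch below assumes 20 maps and a fraction of 0.8 purely so the arithmetic reproduces the 16 in the log; neither value can be read from this excerpt.

// Illustrative only: how the reduce ramp-up gate logged above is typically derived.
public class ReduceSlowStartSketch {
    public static void main(String[] args) {
        int totalMaps = 20;             // assumed for illustration
        double slowStartFraction = 0.8; // assumed value of mapreduce.job.reduce.slowstart.completedmaps
        int completedMaps = 7;          // CompletedMaps:7 in the scheduling summaries above

        int completedMapsForReduceSlowstart = (int) Math.ceil(slowStartFraction * totalMaps);
        if (completedMaps < completedMapsForReduceSlowstart) {
            System.out.println("Reduce slow start threshold not met. completedMapsForReduceSlowstart "
                    + completedMapsForReduceSlowstart);
        } else {
            System.out.println("Reduce slow start threshold reached. Scheduling reduces.");
        }
    }
}
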
2015-04-28 15:10:43,563 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:43,564 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:43,564 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:43,565 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"m8z3T3ZlwZKjUHm2YCROj3kISrB1EwFAj0LxARRz\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:43,565 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@41ef18bf 2015-04-28 15:10:43,566 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:43,566 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:43,566 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:43,566 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:43,566 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:43,566 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:43,567 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"m8z3T3ZlwZKjUHm2YCROj3kISrB1EwFAj0LxARRz\",nc=00000001,cnonce=\"RpDwVAMjj93RQ+30sNtWPWpga3WkujylxFjmGVl/\",digest-uri=\"/default\",maxbuf=65536,response=1ee67ea8b61b5a1e6231a2d55eb96144,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:43,570 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=49a461b6815bd57b87a82a9497dff024" 2015-04-28 15:10:43,570 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:43,571 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #58 2015-04-28 15:10:43,573 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to 
host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:43,579 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #58 2015-04-28 15:10:43,579 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 16ms 2015-04-28 15:10:43,579 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000011_0 : 13562 2015-04-28 15:10:43,580 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:43,580 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:43,581 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000011_0] using containerId: [container_1430213948957_0001_01_000014 on NM: [host-IP143:64318] 2015-04-28 15:10:43,581 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000011_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:43,581 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000011 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:43,581 INFO [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000011 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:43,583 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:43,583 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:43,944 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #149 2015-04-28 15:10:43,948 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#149 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:43,948 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:43,949 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 4 procesingTime= 1 2015-04-28 15:10:43,949 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#149 Retry#0 2015-04-28 15:10:43,949 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#149 Retry#0 Wrote 33 bytes. 2015-04-28 15:10:43,951 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #150 2015-04-28 15:10:43,951 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#150 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:43,951 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:43,951 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:43,951 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#150 Retry#0 2015-04-28 15:10:43,951 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#150 Retry#0 Wrote 267 bytes. 
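
The eventHandlingThread entries here ("Writing event", "In HistoryEventHandler MAP_ATTEMPT_STARTED") and the earlier DFSClient "Queued packet N / Waiting for ack for: N" pairs are the JobHistoryEventHandler appending events to the job's .jhist file in the staging directory and flushing them through the HDFS write pipeline. The sketch below shows the same write-then-hflush pattern against the path from this log; the payload is a plain-text placeholder, since the real handler writes Avro-encoded history events.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Rough shape of what the eventHandlingThread lines correspond to: append an event
// to the .jhist file and force it into the DataNode pipeline with hflush().
public class JhistFlushSketch {
    public static void main(String[] args) throws Exception {
        Path jhist = new Path("hdfs://hacluster:8020/staging-dir/dsperf/.staging/"
                + "job_1430213948957_0001/job_1430213948957_0001_1.jhist");
        FileSystem fs = FileSystem.get(jhist.toUri(), new Configuration());
        try (FSDataOutputStream out = fs.create(jhist, true)) {
            out.writeBytes("MAP_ATTEMPT_STARTED placeholder event\n"); // "Writing event"
            // hflush() is what produces the DFSClient "Queued packet N /
            // Waiting for ack for: N" pairs seen earlier in this log.
            out.hflush();
        }
    }
}
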
2015-04-28 15:10:43,953 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #151 2015-04-28 15:10:43,954 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#151 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:43,954 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:43,954 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:43,954 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#151 Retry#0 2015-04-28 15:10:43,954 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#151 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:43,963 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63578; # active connections: 3; # queued calls: 0 2015-04-28 15:10:44,097 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:44,097 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:44,097 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"Xgv0Iq8tQLO5jGBKkuGIMSHqYbjR8jZKhaZEpA4w\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:44,097 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63578 Call#-33 Retry#-1 2015-04-28 15:10:44,097 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63578 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:44,220 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:44,221 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,221 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,221 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=01b306e13122b3ef7f8065ee02cc801b" 2015-04-28 15:10:44,221 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63578 Call#-33 Retry#-1 2015-04-28 15:10:44,221 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63578 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:44,235 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:44,235 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:44,235 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:44,235 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@6e211001), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:44,236 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,236 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000010 asked for a task 2015-04-28 15:10:44,236 INFO [IPC Server handler 0 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000010 given task: attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:44,236 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:44,237 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@6e211001), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#0 Retry#0 2015-04-28 15:10:44,237 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@6e211001), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#0 Retry#0 Wrote 366 bytes. 
2015-04-28 15:10:44,442 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:44,443 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@7a639821), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:44,443 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,443 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000007_0 is : 0.0 2015-04-28 15:10:44,444 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:44,444 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:44,444 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@7a639821), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#5 Retry#0 2015-04-28 15:10:44,444 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_UPDATE 2015-04-28 15:10:44,444 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@7a639821), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:44,444 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:44,561 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #59 2015-04-28 15:10:44,573 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #59 2015-04-28 15:10:44,573 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 12ms 2015-04-28 15:10:44,574 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=0 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:44,574 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:44,574 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:44,612 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16351; # active connections: 4; # queued calls: 0 2015-04-28 15:10:44,710 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:44,711 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@14739943), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:44,711 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,711 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000007_0 is : 1.0 2015-04-28 15:10:44,712 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:44,712 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:44,713 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_UPDATE 2015-04-28 15:10:44,713 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@14739943), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#7 Retry#0 2015-04-28 15:10:44,713 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000007_0, org.apache.hadoop.mapred.MapTaskStatus@14739943), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#7 Retry#0 Wrote 41 bytes. 
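
The port-21207 traffic around here is the task umbilical: after the child JVM authenticates with its job token, it calls getTask to receive an attempt, reports progress through statusUpdate (0.0 and then 1.0 above), and finally calls done(), which triggers the TA_DONE and SUCCESS_CONTAINER_CLEANUP sequence just below. The stub below only mirrors that call order with the attempt id from this log; the real org.apache.hadoop.mapred.TaskUmbilicalProtocol has different signatures and many more methods, and the JVM id argument here is a placeholder.

// Minimal stand-in for the call order observed on the umbilical (port 21207).
public class UmbilicalCallOrderSketch {
    interface Umbilical {
        String getTask(String jvmId);                        // "JVM with ID ... asked for a task"
        void statusUpdate(String attemptId, float progress); // "Progress of TaskAttempt ... is : x"
        void done(String attemptId);                         // "Done acknowledgement from ..."
    }

    public static void main(String[] args) {
        Umbilical am = new Umbilical() {
            public String getTask(String jvmId) { return "attempt_1430213948957_0001_m_000007_0"; }
            public void statusUpdate(String attemptId, float p) {
                System.out.println("Progress of TaskAttempt " + attemptId + " is : " + p);
            }
            public void done(String attemptId) {
                System.out.println("Done acknowledgement from " + attemptId);
            }
        };
        String attempt = am.getTask("jvm-id-placeholder"); // real ids are jvm_<jobid>_m_<n>
        am.statusUpdate(attempt, 0.0f);
        am.statusUpdate(attempt, 1.0f);
        am.done(attempt); // on the AM side this becomes TA_DONE and container cleanup
    }
}
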
2015-04-28 15:10:44,713 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:44,715 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:44,715 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: done(attempt_1430213948957_0001_m_000007_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:44,715 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,715 INFO [IPC Server handler 10 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:44,715 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 0 2015-04-28 15:10:44,715 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to done(attempt_1430213948957_0001_m_000007_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#8 Retry#0 2015-04-28 15:10:44,716 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to done(attempt_1430213948957_0001_m_000007_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16347 Call#8 Retry#0 Wrote 118 bytes. 2015-04-28 15:10:44,716 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:44,716 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_DONE 2015-04-28 15:10:44,716 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000007_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:44,716 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000008 taskAttempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:44,717 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000008 taskAttempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:44,717 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:44,717 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:44,717 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: 
"host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:44,717 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:44,718 DEBUG [ContainerLauncher #9] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:44,718 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16347. Number of active connections: 3 2015-04-28 15:10:44,718 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:44,719 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:44,719 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:44,720 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:44,720 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:44,722 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"IahWASNnliaQ0Zc7yYNIpJyLbnLyczDn3n1i9aTw\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:44,722 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@41bf30de 2015-04-28 15:10:44,723 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:44,723 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"IahWASNnliaQ0Zc7yYNIpJyLbnLyczDn3n1i9aTw\",nc=00000001,cnonce=\"2mcbRl44gh7EyB5Lkky5Sh88JaRYTwPhirBF+3Xm\",digest-uri=\"/default\",maxbuf=65536,response=38a52d2265129e40b15d063094ef75ae,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:44,734 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=35fb778c155ca4e780e1134100579047" 2015-04-28 15:10:44,734 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:44,749 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:44,751 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #60 2015-04-28 15:10:44,770 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #60 2015-04-28 15:10:44,770 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 52ms 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:44,771 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000007_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] 
org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:44,771 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000007 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:44,771 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000007_0 2015-04-28 15:10:44,771 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000007 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:44,772 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 8 2015-04-28 15:10:44,772 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:44,772 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:44,777 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:44,777 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:44,778 DEBUG [eventHandlingThread] 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:44,778 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=62332 lastFlushOffset=58305 createNewBlock=false 2015-04-28 15:10:44,778 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 14 2015-04-28 15:10:44,778 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 14 2015-04-28 15:10:44,783 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 14 offsetInBlock: 57856 lastPacketInBlock: false lastByteOffsetInBlock: 62332 2015-04-28 15:10:44,786 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 14 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1552270 flag: 0 flag: 0 2015-04-28 15:10:44,786 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:44,786 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:44,787 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=15, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=61952 2015-04-28 15:10:44,787 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:44,787 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=65041 lastFlushOffset=62332 createNewBlock=false 2015-04-28 15:10:44,787 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 15 2015-04-28 15:10:44,787 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 15 2015-04-28 15:10:44,787 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 15 offsetInBlock: 61952 lastPacketInBlock: false lastByteOffsetInBlock: 65041 2015-04-28 15:10:44,792 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 15 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 4060360 flag: 0 flag: 0 2015-04-28 15:10:44,793 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:44,797 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:44,798 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = 
DIGEST-MD5 2015-04-28 15:10:44,798 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"oCi+G7uHCGAj6sHFzMvwTT6tpTM7dCfOoMgrGXeq\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:44,798 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16351 Call#-33 Retry#-1 2015-04-28 15:10:44,798 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16351 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:44,943 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:44,943 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:44,943 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,943 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,944 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=3a462c10ba8d05db8d81937a1a764dd9" 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16351 Call#-33 Retry#-1 2015-04-28 15:10:44,944 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16351 Call#-33 Retry#-1 Wrote 64 bytes. 
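
The three "got #-33" round trips above are one SASL handshake: a freshly launched task JVM connects to the AM's TaskAttemptListener on port 21207, the server offers TOKEN/DIGEST-MD5, evaluates the client's 270-byte response against the password derived from the job token, and answers with the rspauth SUCCESS token before any real RPC is accepted. Below is a minimal sketch of the server side using only the JDK SASL API; Hadoop's SaslRpcServer wraps these same calls, and "token-secret" is just a placeholder for the job-token-derived password.

    import java.util.Collections;
    import javax.security.auth.callback.*;
    import javax.security.sasl.*;

    // Server side of the DIGEST-MD5 exchange, sketched with the JDK SASL API.
    public class DigestMd5ServerSketch {
        public static void main(String[] args) throws Exception {
            CallbackHandler cb = callbacks -> {
                for (Callback c : callbacks) {
                    if (c instanceof NameCallback) {
                        NameCallback nc = (NameCallback) c;
                        nc.setName(nc.getDefaultName());               // client-sent identifier
                    } else if (c instanceof PasswordCallback) {
                        // "setting password for client": shared secret derived from the job token
                        ((PasswordCallback) c).setPassword("token-secret".toCharArray());
                    } else if (c instanceof RealmCallback) {
                        RealmCallback rc = (RealmCallback) c;
                        rc.setText(rc.getDefaultText());
                    } else if (c instanceof AuthorizeCallback) {
                        ((AuthorizeCallback) c).setAuthorized(true);   // "Auth successful for ..."
                    }
                }
            };
            SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "", "default",
                    Collections.singletonMap(Sasl.QOP, "auth"), cb);
            // First call yields the NEGOTIATE challenge (realm, nonce, qop=auth, md5-sess).
            byte[] challenge = server.evaluateResponse(new byte[0]);
            System.out.println("challenge: " + new String(challenge, "UTF-8"));
            // The client's INITIATE token would then be fed back into evaluateResponse();
            // on success the server returns the rspauth token and isComplete() becomes true.
        }
    }
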
2015-04-28 15:10:44,962 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:44,962 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:44,962 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:44,962 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@4b6d3a63), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:44,963 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,963 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000012 asked for a task 2015-04-28 15:10:44,963 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000012 given task: attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:44,963 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:44,963 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #152 2015-04-28 15:10:44,964 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@4b6d3a63), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#0 Retry#0 2015-04-28 15:10:44,964 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@4b6d3a63), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:44,966 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#152 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:44,966 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,966 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 3 procesingTime= 0 2015-04-28 15:10:44,966 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#152 Retry#0 2015-04-28 15:10:44,966 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#152 Retry#0 Wrote 99 bytes. 
2015-04-28 15:10:44,969 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #153 2015-04-28 15:10:44,969 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#153 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:44,969 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,970 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:44,970 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#153 Retry#0 2015-04-28 15:10:44,970 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#153 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:44,972 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #154 2015-04-28 15:10:44,972 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#154 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:44,972 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:44,972 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:44,973 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#154 Retry#0 2015-04-28 15:10:44,973 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#154 Retry#0 Wrote 267 bytes. 
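
The getTaskAttemptCompletionEvents and getJobReport calls arriving on port 48332 as user dsperf are the submitting client polling the AM over MRClientProtocol, roughly once a second, to print progress and pick up newly finished attempts. The sketch below is the user-level equivalent of that loop with org.apache.hadoop.mapreduce.Job (essentially what Job.waitForCompletion(true) drives internally); the job id is copied from the log, everything else is illustrative.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Cluster;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TaskCompletionEvent;

    // Client-side polling loop: each pass maps to one getJobReport plus one
    // getTaskAttemptCompletionEvents RPC against the MRAppMaster.
    public class JobMonitorSketch {
        public static void main(String[] args) throws Exception {
            Cluster cluster = new Cluster(new Configuration());
            Job job = cluster.getJob(JobID.forName("job_1430213948957_0001"));
            int fromEvent = 0;
            while (!job.isComplete()) {
                System.out.printf("map %.0f%% reduce %.0f%%%n",
                        job.mapProgress() * 100, job.reduceProgress() * 100);
                TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEvent, 10);
                for (TaskCompletionEvent ev : events) {
                    System.out.println(ev);
                }
                fromEvent += events.length;
                Thread.sleep(1000);
            }
            System.out.println("succeeded: " + job.isSuccessful());
        }
    }
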
2015-04-28 15:10:45,533 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63581; # active connections: 4; # queued calls: 0 2015-04-28 15:10:45,574 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:4 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:8 CompletedReds:0 ContAlloc:12 ContRel:0 HostLocal:12 RackLocal:0 2015-04-28 15:10:45,575 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #61 2015-04-28 15:10:45,580 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #61 2015-04-28 15:10:45,580 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 5ms 2015-04-28 15:10:45,580 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:45,580 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:45,618 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:45,618 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:45,618 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"0mIgykp3tolUtCQq51taLzFnJndiqR69SRAR7/ZL\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:45,618 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63581 Call#-33 Retry#-1 2015-04-28 15:10:45,618 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63581 Call#-33 Retry#-1 Wrote 178 bytes. 
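
The allocator heartbeat above also shows why the lone pending reduce is still unscheduled: reduce containers are only requested once a configured fraction of the job's maps has completed, and the AM has computed that threshold as 16 completed maps while only 8 are done. The fraction comes from mapreduce.job.reduce.slowstart.completedmaps (default 0.05, scaled by the job's total map count and rounded up). A minimal sketch of overriding it at submission time, with an illustrative value of 0.8, follows.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    // Raise (or lower) the map-completion fraction that gates reduce scheduling.
    // 0.8 is only an example value; the AM's RMContainerAllocator consults this key.
    public class SlowStartConfigSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.setFloat("mapreduce.job.reduce.slowstart.completedmaps", 0.8f);
            Job job = Job.getInstance(conf, "slowstart-example");
            System.out.println(job.getConfiguration()
                    .getFloat("mapreduce.job.reduce.slowstart.completedmaps", 0.05f));
        }
    }
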
2015-04-28 15:10:45,685 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:45,688 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@1e8bf233), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:45,688 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,689 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000008_0 is : 0.0 2015-04-28 15:10:45,689 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 3 procesingTime= 1 2015-04-28 15:10:45,689 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@1e8bf233), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#5 Retry#0 2015-04-28 15:10:45,690 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@1e8bf233), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:45,690 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:45,700 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_UPDATE 2015-04-28 15:10:45,700 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:45,722 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:45,722 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:45,723 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=1b3566daf12ccfa55cfd067c0644e65c" 2015-04-28 15:10:45,723 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63581 Call#-33 Retry#-1 2015-04-28 15:10:45,723 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63581 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:45,740 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:45,740 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:45,740 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:45,740 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@45aadd7e), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:45,741 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,741 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000014 asked for a task 2015-04-28 15:10:45,741 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000014 given task: attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:45,741 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:45,742 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@45aadd7e), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#0 Retry#0 2015-04-28 15:10:45,742 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@45aadd7e), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#0 Retry#0 Wrote 366 bytes. 
2015-04-28 15:10:45,803 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:45,803 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@19fb8ed2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:45,803 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,803 INFO [IPC Server handler 17 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000008_0 is : 1.0 2015-04-28 15:10:45,804 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:45,805 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@19fb8ed2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#7 Retry#0 2015-04-28 15:10:45,805 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000008_0, org.apache.hadoop.mapred.MapTaskStatus@19fb8ed2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#7 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:45,805 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:45,806 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_UPDATE 2015-04-28 15:10:45,806 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:45,817 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:45,817 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: done(attempt_1430213948957_0001_m_000008_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:45,817 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,818 INFO [IPC Server handler 27 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:45,818 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 1 2015-04-28 15:10:45,818 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: responding to done(attempt_1430213948957_0001_m_000008_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#8 Retry#0 
2015-04-28 15:10:45,818 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: responding to done(attempt_1430213948957_0001_m_000008_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16348 Call#8 Retry#0 Wrote 118 bytes. 2015-04-28 15:10:45,818 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:45,819 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_DONE 2015-04-28 15:10:45,819 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000008_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:45,819 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000011 taskAttempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:45,819 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000011 taskAttempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:45,819 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:45,819 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:45,820 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:45,821 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16348. 
Number of active connections: 3 2015-04-28 15:10:45,821 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:45,821 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:45,823 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"lHC6ryj5bMs6j1yELgIs7tRjzrVf71L9xmy2O5MO\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:45,823 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@26486c0a 2015-04-28 15:10:45,823 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:45,823 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:45,824 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:45,824 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:45,824 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:45,824 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:45,824 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"lHC6ryj5bMs6j1yELgIs7tRjzrVf71L9xmy2O5MO\",nc=00000001,cnonce=\"tIoOUwa9nHgyXZb6+LqQrB1NxV1dHJq67v+LuMsE\",digest-uri=\"/default\",maxbuf=65536,response=980fca33104e20569ab6727677c9475b,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:45,827 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=599039d7065b91796f34ac739958f8b1" 2015-04-28 15:10:45,828 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:45,848 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:45,848 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #62 
2015-04-28 15:10:45,852 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #62 2015-04-28 15:10:45,853 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:45,853 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:45,853 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 33ms 2015-04-28 15:10:45,853 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:45,853 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:45,854 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000008_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:45,854 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000008 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:45,854 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000008_0 2015-04-28 15:10:45,854 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000008 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:45,854 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 9 2015-04-28 15:10:45,854 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:45,855 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=16, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=65024 2015-04-28 15:10:45,856 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:45,856 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=68189 lastFlushOffset=65041 createNewBlock=false 2015-04-28 15:10:45,856 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 16 2015-04-28 15:10:45,856 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 16 2015-04-28 15:10:45,856 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 16 offsetInBlock: 65024 lastPacketInBlock: false lastByteOffsetInBlock: 68189 2015-04-28 15:10:45,858 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 16 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1587715 flag: 0 flag: 0 2015-04-28 15:10:45,859 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:45,859 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:45,859 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=17, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=68096 2015-04-28 15:10:45,860 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:45,860 DEBUG [eventHandlingThread] 
org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=70898 lastFlushOffset=68189 createNewBlock=false 2015-04-28 15:10:45,860 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 17 2015-04-28 15:10:45,860 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 17 2015-04-28 15:10:45,860 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 17 offsetInBlock: 68096 lastPacketInBlock: false lastByteOffsetInBlock: 70898 2015-04-28 15:10:45,868 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 17 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 7430914 flag: 0 flag: 0 2015-04-28 15:10:45,869 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:45,976 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #155 2015-04-28 15:10:45,976 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#155 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:45,976 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,977 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 1 2015-04-28 15:10:45,977 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#155 Retry#0 2015-04-28 15:10:45,977 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#155 Retry#0 Wrote 99 bytes. 
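
Each MAP_ATTEMPT_FINISHED / TASK_FINISHED event handled above follows the same pattern: the eventHandlingThread appends the serialized event to the open .jhist file on HDFS and flushes it, which is what the "Queued packet N ... Waiting for ack ... reply: SUCCESS reply: SUCCESS" groups record (one SUCCESS reply per datanode in the write pipeline). A minimal sketch of that durability step, using FSDataOutputStream.hflush() against the same cluster URI but a stand-in path and payload:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Append one history event and hflush(): the call returns only after every
    // datanode in the pipeline has acknowledged the queued packet.
    public class HistoryFlushSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(URI.create("hdfs://hacluster:8020"), conf);
            Path jhist = new Path("/tmp/example_job_1.jhist");   // stand-in path
            try (FSDataOutputStream out = fs.create(jhist, true)) {
                out.write("TASK_FINISHED event bytes\n".getBytes("UTF-8"));
                out.hflush();   // the "Waiting for ack" point in the log
            }
        }
    }
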
2015-04-28 15:10:45,978 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #156 2015-04-28 15:10:45,979 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#156 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:45,979 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,979 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:45,979 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#156 Retry#0 2015-04-28 15:10:45,979 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#156 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:45,982 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #157 2015-04-28 15:10:45,982 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#157 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:45,982 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:45,982 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:45,982 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#157 Retry#0 2015-04-28 15:10:45,982 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#157 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:46,140 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf: closed 2015-04-28 15:10:46,140 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf: stopped, remaining connections 1 2015-04-28 15:10:46,308 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:46,309 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@62933538), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,309 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,309 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000010_0 is : 0.0 2015-04-28 15:10:46,310 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:46,310 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@62933538), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#5 Retry#0 2015-04-28 15:10:46,310 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@62933538), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#5 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:46,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:46,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_UPDATE 2015-04-28 15:10:46,311 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,380 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:46,381 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@485ceb35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,381 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,381 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000010_0 is : 1.0 2015-04-28 15:10:46,386 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 5 2015-04-28 15:10:46,386 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@485ceb35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#7 Retry#0 2015-04-28 15:10:46,386 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000010_0, org.apache.hadoop.mapred.MapTaskStatus@485ceb35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:46,386 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:46,387 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_UPDATE 2015-04-28 15:10:46,387 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,387 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:46,387 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: done(attempt_1430213948957_0001_m_000010_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,388 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,388 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:46,388 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:46,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:46,388 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to done(attempt_1430213948957_0001_m_000010_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#8 Retry#0 2015-04-28 15:10:46,388 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to done(attempt_1430213948957_0001_m_000010_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63578 Call#8 Retry#0 Wrote 118 bytes. 
2015-04-28 15:10:46,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_DONE 2015-04-28 15:10:46,389 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000010_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:46,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000010 taskAttempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:46,389 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000010 taskAttempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:46,390 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:46,390 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63578. Number of active connections: 2 2015-04-28 15:10:46,390 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:46,390 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,391 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:46,391 DEBUG [ContainerLauncher #1] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:46,391 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:46,391 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
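
The CONTAINER_REMOTE_CLEANUP handling above (the "KILLING attempt_..." line, the NMToken acquisition, and the ContainerManagementProtocol proxy creation) is the AM asking the NodeManager to stop a container whose task has already reported done; the lines that follow show the SASL handshake and the resulting stopContainers call. A minimal sketch of the same operation using YARN's higher-level NMClient API (the log uses the raw protocol proxy instead; containerId and nodeId are assumed to be supplied by the caller):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.records.ContainerId;
    import org.apache.hadoop.yarn.api.records.NodeId;
    import org.apache.hadoop.yarn.client.api.NMClient;

    // Minimal sketch: once a task attempt reports done, the AM asks the NodeManager
    // hosting the container to stop it. The NMToken-based authentication seen in the
    // log is handled by the RPC layer underneath this client API.
    public class ContainerCleanup {
      public static void stopFinishedContainer(Configuration conf,
                                               ContainerId containerId,
                                               NodeId nodeId) throws Exception {
        NMClient nmClient = NMClient.createNMClient();
        nmClient.init(conf);
        nmClient.start();
        try {
          // Sends a stopContainers request to the NM for this container.
          nmClient.stopContainer(containerId, nodeId);
        } finally {
          nmClient.stop();
        }
      }
    }

Stopping the container delivers SIGTERM, which is why the diagnostics reported later in this log read "Container killed by the ApplicationMaster ... Exit code is 143" (128 + 15).
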
2015-04-28 15:10:46,391 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:46,392 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:46,392 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:46,394 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"yoBJDIiIaienrBKxtLpPsD5Mg6pd11747nqHQ2XA\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:46,394 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@5788a645 2015-04-28 15:10:46,394 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,394 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:46,395 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:46,395 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:46,395 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:46,395 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:46,395 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"yoBJDIiIaienrBKxtLpPsD5Mg6pd11747nqHQ2XA\",nc=00000001,cnonce=\"MRooyOyit1aSpGSMxnNPtLuQ/4oBWouDHVD4R3KL\",digest-uri=\"/default\",maxbuf=65536,response=867b425f3b9d55dbf1a6abb1cab35b3f,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:46,399 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=ffa8efff28fc8d0582cfc4ea6462d833" 2015-04-28 15:10:46,399 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:46,399 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:46,400 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) 
connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #63 2015-04-28 15:10:46,406 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #63 2015-04-28 15:10:46,406 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 15ms 2015-04-28 15:10:46,406 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:46,406 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:46,406 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000010_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:46,406 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000010 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:46,407 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000010_0 2015-04-28 15:10:46,407 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000010 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:46,407 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 10 2015-04-28 15:10:46,407 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:46,407 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:46,410 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=18, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=70656 2015-04-28 15:10:46,411 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:46,411 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=74046 lastFlushOffset=70898 createNewBlock=false 2015-04-28 15:10:46,411 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 18 2015-04-28 15:10:46,411 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 18 2015-04-28 15:10:46,411 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 18 offsetInBlock: 70656 lastPacketInBlock: false lastByteOffsetInBlock: 74046 2015-04-28 15:10:46,414 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 18 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1756785 flag: 0 flag: 0 2015-04-28 15:10:46,414 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=19, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=73728 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file 
hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=76755 lastFlushOffset=74046 createNewBlock=false 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 19 2015-04-28 15:10:46,415 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 19 2015-04-28 15:10:46,416 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 19 offsetInBlock: 73728 lastPacketInBlock: false lastByteOffsetInBlock: 76755 2015-04-28 15:10:46,420 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 19 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1310424 flag: 0 flag: 0 2015-04-28 15:10:46,420 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:46,558 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:46,558 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@5b896f86), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,558 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,559 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000009_0 is : 0.0 2015-04-28 15:10:46,559 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 0 2015-04-28 15:10:46,559 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@5b896f86), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#5 Retry#0 2015-04-28 15:10:46,559 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@5b896f86), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#5 Retry#0 Wrote 41 bytes. 
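
The JobHistoryEventHandler/DFSClient lines above ("Writing event", "Flushing Job MetaInfo", "Queued packet", "Waiting for ack", and the ResponseProcessor SUCCESS replies) are the write-then-flush cycle used for the .jhist file: each history event is appended to the open HDFS stream and then flushed, so the data reaches the datanode pipeline and is acknowledged before the handler continues. A minimal sketch of that pattern with the standard HDFS client API (the path and payload below are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Write-then-flush sketch: append a record to an open HDFS file and call
    // hflush() so the bytes are pushed down the datanode pipeline and acknowledged
    // before moving on, as in the DFSClient "Queued packet / Waiting for ack" lines.
    public class HistoryFlushExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path jhist = new Path("/tmp/example_job_history.jhist"); // placeholder path
        try (FSDataOutputStream out = fs.create(jhist, true)) {
          byte[] event = "MAP_ATTEMPT_FINISHED example event\n".getBytes("UTF-8");
          out.write(event);
          // hflush(): data becomes visible to readers of the still-open file once
          // the pipeline acks; hsync() would additionally force it to disk on the
          // datanodes, at a higher cost per event.
          out.hflush();
        }
      }
    }
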
2015-04-28 15:10:46,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:46,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_UPDATE 2015-04-28 15:10:46,560 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,580 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:4 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:10 CompletedReds:0 ContAlloc:12 ContRel:0 HostLocal:12 RackLocal:0 2015-04-28 15:10:46,581 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #64 2015-04-28 15:10:46,586 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #64 2015-04-28 15:10:46,586 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 6ms 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000013, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000008 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000013 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] 
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000007_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=4 #asks=0 2015-04-28 15:10:46,587 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000007_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=3 #asks=1 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=4 #asks=1 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=3 #asks=2 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=4 #asks=2 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=3 #asks=3 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=4 #asks=3 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=3 #asks=4 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000013 to attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000013, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000012_0 on node host-IP117:64318 2015-04-28 15:10:46,587 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:46,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:3 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:10 CompletedReds:0 ContAlloc:13 ContRel:0 HostLocal:13 RackLocal:0 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_ASSIGNED 2015-04-28 15:10:46,587 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:46,588 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:46,588 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000012_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:46,588 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:46,588 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:46,588 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000013 taskAttempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:46,588 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:46,588 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000013 taskAttempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:46,588 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:46,589 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:46,589 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,589 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:46,589 DEBUG [ContainerLauncher #2] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface 
org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:46,589 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:46,590 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:46,590 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:46,591 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:46,591 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:46,592 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"E57xUBsSK6voCMRfIeNf1hxBPnozwvob7O5eA4zx\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:46,592 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@4f876c79 2015-04-28 15:10:46,593 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:46,593 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"E57xUBsSK6voCMRfIeNf1hxBPnozwvob7O5eA4zx\",nc=00000001,cnonce=\"Pe/602N7om6DjamLeyacD52fZSx+yjcXt/AomwT3\",digest-uri=\"/default\",maxbuf=65536,response=ac4411b73992e8908ef25ee70d06fe89,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:46,596 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=fcc07d98474269f4458324b673f6e900" 2015-04-28 15:10:46,597 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:46,597 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from 
appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:46,599 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #65 2015-04-28 15:10:46,605 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #65 2015-04-28 15:10:46,605 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 15ms 2015-04-28 15:10:46,605 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:46,605 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:46,605 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000012_0 : 13562 2015-04-28 15:10:46,605 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:46,605 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:46,606 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000012_0] using containerId: [container_1430213948957_0001_01_000013 on NM: [host-IP117:64318] 2015-04-28 15:10:46,606 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000012_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:46,606 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000012 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:46,606 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000012 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:46,606 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:46,606 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=20, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=76288 2015-04-28 15:10:46,606 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:46,648 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:46,648 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@4a2b119b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,648 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,648 INFO [IPC Server handler 10 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000009_0 is : 1.0 2015-04-28 15:10:46,649 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:46,649 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:46,650 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_UPDATE 2015-04-28 15:10:46,650 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@4a2b119b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#7 Retry#0 2015-04-28 15:10:46,650 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,650 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000009_0, org.apache.hadoop.mapred.MapTaskStatus@4a2b119b), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#7 Retry#0 Wrote 41 bytes. 
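
The RMCommunicator Allocator lines above ("Before Scheduling", "Received new Container", the BEFORE/AFTER decResourceRequest pairs, "Assigned container ... to attempt_...", "After Scheduling") are one heartbeat of the AM-to-ResourceManager allocation cycle: outstanding requests go out, newly granted containers come back along with completed-container statuses, each grant is matched to a pending attempt (host first, then rack, then any), and the matching resource requests are decremented. A rough sketch of one such cycle using the general AMRMClient API (the MRAppMaster uses its own RMContainerAllocator over the same protocol; resource sizes, host names and the progress value below are placeholders):

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
    import org.apache.hadoop.yarn.api.records.Container;
    import org.apache.hadoop.yarn.api.records.ContainerStatus;
    import org.apache.hadoop.yarn.api.records.Priority;
    import org.apache.hadoop.yarn.api.records.Resource;
    import org.apache.hadoop.yarn.client.api.AMRMClient;
    import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;

    // One request/allocate heartbeat: ask for a map-sized container at priority 20
    // with a data-local host hint, then collect newly allocated containers and the
    // statuses of containers that finished since the last heartbeat.
    public class AllocateLoopSketch {
      public static void heartbeatOnce(AMRMClient<ContainerRequest> amRmClient) throws Exception {
        Resource capability = Resource.newInstance(1024, 1);   // placeholder: 1 GB, 1 vcore
        Priority mapPriority = Priority.newInstance(20);        // same priority as in the log
        String[] hosts = { "host-IP117" };                       // data-local host hint
        amRmClient.addContainerRequest(new ContainerRequest(capability, hosts, null, mapPriority));

        AllocateResponse response = amRmClient.allocate(0.5f);   // progress value is a placeholder
        List<Container> allocated = response.getAllocatedContainers();
        List<ContainerStatus> completed = response.getCompletedContainersStatuses();
        System.out.println("allocated=" + allocated.size() + " completed=" + completed.size());
      }
    }

The "Reduce slow start threshold not met" line is the same scheduling pass declining to request reduce containers yet, because fewer maps have completed (CompletedMaps:10) than the completedMapsForReduceSlowstart threshold of 16 shown above.
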
2015-04-28 15:10:46,651 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:46,652 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: done(attempt_1430213948957_0001_m_000009_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:46,652 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,652 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:46,652 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 0 2015-04-28 15:10:46,652 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:46,653 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to done(attempt_1430213948957_0001_m_000009_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#8 Retry#0 2015-04-28 15:10:46,653 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_DONE 2015-04-28 15:10:46,653 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to done(attempt_1430213948957_0001_m_000009_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16351 Call#8 Retry#0 Wrote 118 bytes. 
2015-04-28 15:10:46,653 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000009_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:46,653 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000012 taskAttempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:46,653 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000012 taskAttempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:46,653 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:46,653 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:46,653 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,653 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:46,653 DEBUG [ContainerLauncher #3] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:46,654 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:46,654 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:46,654 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:46,655 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16351. 
Number of active connections: 1 2015-04-28 15:10:46,655 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:46,655 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:46,657 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"iJpGEWpXQJqrj5uO7qFtVbJFxrg7wrVBJFjSqf6X\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:46,657 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@490e5f63 2015-04-28 15:10:46,658 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:46,658 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:46,658 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:46,658 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:46,658 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:46,658 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:46,659 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"iJpGEWpXQJqrj5uO7qFtVbJFxrg7wrVBJFjSqf6X\",nc=00000001,cnonce=\"SFHhXhgQXRgD2RksjNmFOuMo+YURbu5pIrpoWz3o\",digest-uri=\"/default\",maxbuf=65536,response=cf2cb3fae691022c8f94a815df8edc77,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:46,662 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=5a3ada531a9c24a060ea5b89e2b33d48" 2015-04-28 15:10:46,662 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:46,662 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:46,662 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #66 
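
Each "Opening proxy" above is followed by the same SASL exchange: the NodeManager offers TOKEN authentication with the DIGEST-MD5 mechanism (NEGOTIATE), the client replies with a digest computed from the NMToken identifier and password (INITIATE), and the server confirms with an rspauth value (SUCCESS), after which the negotiated QOP is plain "auth" (authentication only, no wire encryption). A bare-bones sketch of creating such a DIGEST-MD5 client with the standard Java SASL API (the user and password would come from the NMToken; the empty protocol and "default" server name mirror the NEGOTIATE message in the log):

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.callback.Callback;
    import javax.security.auth.callback.CallbackHandler;
    import javax.security.auth.callback.NameCallback;
    import javax.security.auth.callback.PasswordCallback;
    import javax.security.sasl.RealmCallback;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;

    // Sketch of a DIGEST-MD5 SASL client as used for TOKEN authentication:
    // the callback handler supplies the token-derived username and password.
    public class DigestMd5Sketch {
      public static SaslClient newClient(final String tokenUser,
                                         final char[] tokenPassword) throws Exception {
        CallbackHandler handler = callbacks -> {
          for (Callback cb : callbacks) {
            if (cb instanceof NameCallback) {
              ((NameCallback) cb).setName(tokenUser);
            } else if (cb instanceof PasswordCallback) {
              ((PasswordCallback) cb).setPassword(tokenPassword);
            } else if (cb instanceof RealmCallback) {
              ((RealmCallback) cb).setText(((RealmCallback) cb).getDefaultText());
            }
          }
        };
        Map<String, String> props = new HashMap<>();
        props.put(Sasl.QOP, "auth");   // matches "Negotiated QOP is :auth" in the log
        return Sasl.createSaslClient(new String[] { "DIGEST-MD5" },
            null, "", "default", props, handler);
      }
    }

The returned SaslClient's evaluateChallenge() would then be fed each server challenge in turn, producing the INITIATE token and verifying the final rspauth seen in the SUCCESS message.
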
2015-04-28 15:10:46,666 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #66 2015-04-28 15:10:46,666 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:46,667 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:46,667 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 13ms 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:46,667 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000009_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000009 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:46,667 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000009_0 2015-04-28 15:10:46,667 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000009 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:46,667 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:46,667 DEBUG 
[AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:46,668 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:46,668 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:46,668 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 11 2015-04-28 15:10:46,668 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:46,673 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:46,678 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:46,678 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=80324 lastFlushOffset=76755 createNewBlock=false 2015-04-28 15:10:46,678 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 20 2015-04-28 15:10:46,678 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 20 2015-04-28 15:10:46,678 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 20 offsetInBlock: 76288 lastPacketInBlock: false lastByteOffsetInBlock: 80324 2015-04-28 15:10:46,681 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 20 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1604725 flag: 0 flag: 0 2015-04-28 15:10:46,681 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:46,681 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:46,682 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=21, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=79872 2015-04-28 15:10:46,682 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:46,682 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=83032 lastFlushOffset=80324 createNewBlock=false 2015-04-28 15:10:46,682 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 21 2015-04-28 15:10:46,682 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: 
Waiting for ack for: 21 2015-04-28 15:10:46,683 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 21 offsetInBlock: 79872 lastPacketInBlock: false lastByteOffsetInBlock: 83032 2015-04-28 15:10:46,689 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 21 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 5790039 flag: 0 flag: 0 2015-04-28 15:10:46,689 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:46,985 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #158 2015-04-28 15:10:46,985 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#158 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:46,985 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,985 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:46,985 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#158 Retry#0 2015-04-28 15:10:46,986 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#158 Retry#0 Wrote 166 bytes. 2015-04-28 15:10:46,987 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #159 2015-04-28 15:10:46,987 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#159 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:46,987 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,988 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:46,988 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#159 Retry#0 2015-04-28 15:10:46,988 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#159 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:46,990 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #160 2015-04-28 15:10:46,990 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#160 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:46,990 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:46,990 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:46,990 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#160 Retry#0 2015-04-28 15:10:46,990 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#160 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:47,252 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:47,253 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@4d3f05f6), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:47,253 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,253 INFO [IPC Server handler 2 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000011_0 is : 0.0 2015-04-28 15:10:47,254 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:47,254 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:47,254 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@4d3f05f6), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#5 Retry#0 2015-04-28 15:10:47,254 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_UPDATE 2015-04-28 15:10:47,254 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@4d3f05f6), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#5 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:47,254 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:47,314 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:47,315 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@d5e7c5a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:47,315 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,315 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000011_0 is : 1.0 2015-04-28 15:10:47,316 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:47,316 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:47,316 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@d5e7c5a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#7 Retry#0 2015-04-28 15:10:47,316 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000011_0, org.apache.hadoop.mapred.MapTaskStatus@d5e7c5a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#7 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:47,317 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_UPDATE 2015-04-28 15:10:47,317 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:47,318 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:47,318 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: done(attempt_1430213948957_0001_m_000011_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:47,318 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,318 INFO [IPC Server handler 9 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:47,318 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 0 2015-04-28 15:10:47,318 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:47,319 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_DONE 2015-04-28 15:10:47,319 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to done(attempt_1430213948957_0001_m_000011_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#8 Retry#0 2015-04-28 15:10:47,319 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000011_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:47,319 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: responding to done(attempt_1430213948957_0001_m_000011_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63581 Call#8 Retry#0 Wrote 118 bytes. 
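The umbilical sequence for map attempt m_000011 is complete in the entries above: statusUpdate reports progress 0.0 and then 1.0 to the TaskAttemptListener, a final done() call follows once the output is committed, and the AM turns those RPCs into TA_UPDATE and TA_DONE events that move the attempt from RUNNING to SUCCESS_CONTAINER_CLEANUP. A much-reduced sketch of that call sequence follows; the interface and class names are invented, and the real org.apache.hadoop.mapred.TaskUmbilicalProtocol has a wider, versioned signature:

// Hypothetical, stripped-down stand-in for the umbilical RPC interface the
// task JVM uses to talk to the MRAppMaster (not the real Hadoop signatures).
interface MiniUmbilical {
    void statusUpdate(String attemptId, float progress);   // "Progress of TaskAttempt ... is : x"
    void done(String attemptId);                            // "Done acknowledgement from ..."
}

class MiniListener implements MiniUmbilical {
    // The two states the log shows this attempt passing through next.
    enum AttemptState { RUNNING, SUCCESS_CONTAINER_CLEANUP }
    private AttemptState state = AttemptState.RUNNING;

    @Override public void statusUpdate(String attemptId, float progress) {
        System.out.println("Progress of TaskAttempt " + attemptId + " is : " + progress);
        // here the real AM dispatches TA_UPDATE plus a speculator ATTEMPT_STATUS_UPDATE
    }

    @Override public void done(String attemptId) {
        System.out.println("Done acknowledgement from " + attemptId);
        state = AttemptState.SUCCESS_CONTAINER_CLEANUP;     // the TA_DONE transition
        System.out.println(attemptId + " TaskAttempt Transitioned from RUNNING to " + state);
    }

    public static void main(String[] args) {
        MiniListener am = new MiniListener();
        String attempt = "attempt_1430213948957_0001_m_000011_0";
        am.statusUpdate(attempt, 0.0f);   // first report after launch
        am.statusUpdate(attempt, 1.0f);   // map finished its split
        am.done(attempt);                 // triggers container cleanup, then SUCCEEDED
    }
}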
2015-04-28 15:10:47,319 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000014 taskAttempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:47,319 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000014 taskAttempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:47,319 INFO [ContainerLauncher #4] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:47,319 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:47,319 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,320 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:47,320 DEBUG [ContainerLauncher #4] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:47,320 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:47,320 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:47,320 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:47,321 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63581. Number of active connections: 0 2015-04-28 15:10:47,321 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:47,321 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:47,324 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"cyxGUBLnetvqqP3Mpdku7T59EKogOW8bhHanYnMr\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:47,324 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@140d66bc 2015-04-28 15:10:47,324 INFO [ContainerLauncher #4] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,324 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:47,325 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:47,325 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:47,325 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:47,325 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:47,325 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"cyxGUBLnetvqqP3Mpdku7T59EKogOW8bhHanYnMr\",nc=00000001,cnonce=\"deWWiCOJqL68L7n2U29Jj7qYtPFA0loYSg1YgC2U\",digest-uri=\"/default\",maxbuf=65536,response=2a192be58c23317860dd6def38fbe610,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:47,328 DEBUG [ContainerLauncher #4] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=ef79971512792c60fc31c7edf7656911" 2015-04-28 15:10:47,328 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:47,332 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:47,333 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #67 2015-04-28 15:10:47,340 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #67 2015-04-28 15:10:47,341 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:47,341 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:47,341 DEBUG [ContainerLauncher #4] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 21ms 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:47,341 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000011_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:47,341 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:47,341 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000011 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:47,341 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000011_0 2015-04-28 15:10:47,342 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000011 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:47,342 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 12 2015-04-28 15:10:47,342 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:47,344 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient 
writeChunk allocating new packet seqno=22, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=82944 2015-04-28 15:10:47,344 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:47,344 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=86167 lastFlushOffset=83032 createNewBlock=false 2015-04-28 15:10:47,344 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 22 2015-04-28 15:10:47,344 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 22 2015-04-28 15:10:47,344 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 22 offsetInBlock: 82944 lastPacketInBlock: false lastByteOffsetInBlock: 86167 2015-04-28 15:10:47,346 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 22 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1374024 flag: 0 flag: 0 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=23, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=86016 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=88875 lastFlushOffset=86167 createNewBlock=false 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 23 2015-04-28 15:10:47,347 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 23 2015-04-28 15:10:47,348 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 23 offsetInBlock: 86016 lastPacketInBlock: false lastByteOffsetInBlock: 88875 2015-04-28 15:10:47,350 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 23 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1306429 flag: 0 flag: 0 2015-04-28 15:10:47,350 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 
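The packet 22 and 23 exchanges above show the HDFS client write path used for the .jhist history file: each flush slices the buffered bytes into a packet with a fresh sequence number, queues it ("Queued packet 22"), blocks on "Waiting for ack for: 22" while the DataStreamer thread ships the packet to the datanode pipeline, and the ResponseProcessor thread releases the waiter when the ack with the matching seqno comes back. A toy version of that seqno-matched queue-and-ack pattern (illustrative only, not Hadoop's DFSOutputStream):

import java.util.concurrent.*;

// Toy version of the DataStreamer / ResponseProcessor pair: the writer queues
// packets with a sequence number, a streamer thread "sends" them, and an ack
// thread retires them by seqno so flush() can block on "Waiting for ack for: N".
public class SeqnoAckDemo {
    record Packet(long seqno, int lastByteOffsetInBlock) {}

    private final BlockingQueue<Packet> dataQueue = new LinkedBlockingQueue<>();
    private final BlockingQueue<Packet> ackQueue  = new LinkedBlockingQueue<>();
    private long lastAcked = -1;

    void enqueue(Packet p) throws InterruptedException { dataQueue.put(p); }

    // Blocks until the packet with the given seqno is acknowledged,
    // mirroring "Waiting for ack for: 22".
    synchronized void waitForAck(long seqno) throws InterruptedException {
        while (lastAcked < seqno) wait();
    }

    void start() {
        Thread streamer = new Thread(() -> {                 // "DataStreamer"
            try {
                while (true) {
                    Packet p = dataQueue.take();
                    System.out.println("sending packet seqno: " + p.seqno());
                    ackQueue.put(p);                          // pretend the pipeline echoes an ack
                }
            } catch (InterruptedException ignored) { }
        });
        Thread acker = new Thread(() -> {                     // "ResponseProcessor"
            try {
                while (true) {
                    Packet p = ackQueue.take();
                    System.out.println("seqno: " + p.seqno() + " reply: SUCCESS");
                    synchronized (this) { lastAcked = p.seqno(); notifyAll(); }
                }
            } catch (InterruptedException ignored) { }
        });
        streamer.setDaemon(true); acker.setDaemon(true);
        streamer.start(); acker.start();
    }

    public static void main(String[] args) throws Exception {
        SeqnoAckDemo stream = new SeqnoAckDemo();
        stream.start();
        stream.enqueue(new Packet(22, 86167));
        stream.waitForAck(22);                                // returns once seqno 22 is acked
        System.out.println("flush complete");
    }
}

The real client keeps sent packets on an ack queue so they can be resent if the pipeline fails; the sketch skips that recovery path entirely.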
2015-04-28 15:10:47,587 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:3 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:12 CompletedReds:0 ContAlloc:13 ContRel:0 HostLocal:13 RackLocal:0 2015-04-28 15:10:47,588 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #68 2015-04-28 15:10:47,598 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #68 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 11ms 2015-04-28 15:10:47,599 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=2 finishedContainers=2 resourcelimit= knownNMs=2 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000016, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000015, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:47,599 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000011 2015-04-28 15:10:47,599 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000010 2015-04-28 15:10:47,599 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:47,599 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 2 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000016 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:47,599 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000015 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:47,599 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:47,599 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:47,599 DEBUG 
[AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000008_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=3 #asks=0 2015-04-28 15:10:47,600 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000008_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000010_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=2 #asks=1 2015-04-28 15:10:47,600 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000010_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=3 #asks=1 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=2 #asks=2 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=3 #asks=2 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=2 #asks=3 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=3 #asks=3 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=2 #asks=4 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000016 to attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_ASSIGNED 2015-04-28 15:10:47,600 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000016, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000013_0 on node host-IP117:64318 2015-04-28 15:10:47,600 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP143 2015-04-28 15:10:47,600 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=2 #asks=4 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: 
applicationId=1 priority=20 resourceName=host-IP143 numContainers=1 #asks=4 2015-04-28 15:10:47,601 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=2 #asks=4 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=1 #asks=4 2015-04-28 15:10:47,601 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=2 #asks=4 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=1 #asks=4 2015-04-28 15:10:47,601 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:47,601 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=2 #asks=4 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=1 #asks=4 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000015 to attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000015, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ]) to task attempt_1430213948957_0001_m_000014_0 on node host-IP143:64318 2015-04-28 15:10:47,601 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP143 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:47,601 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:1 ScheduledReds:0 AssignedMaps:5 AssignedReds:0 CompletedMaps:12 CompletedReds:0 ContAlloc:15 ContRel:0 HostLocal:15 RackLocal:0 2015-04-28 15:10:47,602 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000013_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:47,602 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:47,602 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:47,602 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:47,602 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_ASSIGNED 2015-04-28 15:10:47,602 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:47,602 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP143 to /default-rack 2015-04-28 15:10:47,604 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000014_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000016 taskAttempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000015 taskAttempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:47,604 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:47,605 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH 
for container container_1430213948957_0001_01_000016 taskAttempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:47,605 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:47,605 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:47,605 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,606 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:47,606 DEBUG [ContainerLauncher #5] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:47,606 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000015 taskAttempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:47,606 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:47,606 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:47,606 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:47,607 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,607 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:47,608 DEBUG [ContainerLauncher #6] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:47,608 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:47,608 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:47,608 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
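The allocate round above returns two new containers and two completed ones, and the BEFORE/AFTER decResourceRequest pairs show how the allocator books them: every still-pending map is counted in the ResourceRequest table once per preferred host, once per rack (/default-rack) and once under the wildcard *, so satisfying one request on a matching host decrements all of its entries together, which is why host-IP117, host-IP143, /default-rack and * all drop from 3 to 2 at the same time. Reduces meanwhile stay pending because the slow start gate ("completedMapsForReduceSlowstart 16") wants 16 finished maps and only 12 are done. A toy version of the multi-level bookkeeping follows (data structure only, not the RMContainerRequestor internals):

import java.util.*;

// Toy version of the host / rack / * ResourceRequest table behind the
// "BEFORE/AFTER decResourceRequest" lines: a pending map whose split has
// replicas on several hosts is counted once per host, once per rack, and once
// under ANY ("*"); when one container satisfies it, every counter it touched
// is decremented, which is why both hosts drop together in the log.
public class RequestTable {
    private static final String ANY = "*";
    private final Map<String, Integer> numContainers = new HashMap<>();
    private final Map<String, String> rackOf;

    RequestTable(Map<String, String> rackOf) { this.rackOf = rackOf; }

    private List<String> namesFor(List<String> hosts) {
        List<String> names = new ArrayList<>(hosts);
        hosts.stream().map(rackOf::get).distinct().forEach(names::add);
        names.add(ANY);
        return names;
    }

    void addRequest(List<String> hosts) {
        namesFor(hosts).forEach(n -> numContainers.merge(n, 1, Integer::sum));
    }

    void requestSatisfied(List<String> hosts) {
        for (String n : namesFor(hosts)) {
            numContainers.merge(n, -1, Integer::sum);
            System.out.println("AFTER decResourceRequest: resourceName=" + n
                    + " numContainers=" + numContainers.get(n));
        }
    }

    public static void main(String[] args) {
        RequestTable t = new RequestTable(Map.of(
                "host-IP117", "/default-rack",
                "host-IP143", "/default-rack"));
        List<String> bothHosts = List.of("host-IP143", "host-IP117");
        // three pending maps, each with replicas on both nodes ("ScheduledMaps:3")
        for (int i = 0; i < 3; i++) t.addRequest(bothHosts);
        // two newly allocated containers each satisfy one pending map
        t.requestSatisfied(bothHosts);   // counters go 3 -> 2, as in the log
        t.requestSatisfied(bothHosts);   // and then 2 -> 1
    }
}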
2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:47,609 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:47,610 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"hq1GACGp1tV/sfh8MmxXY5NL2boAMtKkD3WTrGE3\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:47,610 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@1a29ddd 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"dSRWMU8VC5JEC4wAY36HCNlJqzQn+4V08HOXDHsF\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@3d18c18c 2015-04-28 15:10:47,611 INFO [ContainerLauncher #5] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:47,611 INFO [ContainerLauncher #6] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. 
Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"dSRWMU8VC5JEC4wAY36HCNlJqzQn+4V08HOXDHsF\",nc=00000001,cnonce=\"KvDuRHFSFFvbzYmwI2X4bzh35/BoIHRbm3GUEqMc\",digest-uri=\"/default\",maxbuf=65536,response=cdf69847d505bffad8d1b7dc0a08e493,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:47,611 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"hq1GACGp1tV/sfh8MmxXY5NL2boAMtKkD3WTrGE3\",nc=00000001,cnonce=\"5MBha+jGUJCcZHg3WUCFSw91x9Rw7+eD3Dkcb99j\",digest-uri=\"/default\",maxbuf=65536,response=d3495094fed04bf669b2298581c9f415,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:47,613 DEBUG [ContainerLauncher #6] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=4222a9c4ac7018257cd1d87e886b5613" 2015-04-28 15:10:47,614 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:47,614 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:47,614 DEBUG [ContainerLauncher #5] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: 
"rspauth=c1278587087964cff465b3cd055d7062" 2015-04-28 15:10:47,615 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #70 2015-04-28 15:10:47,615 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:47,615 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #69 2015-04-28 15:10:47,616 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 3 2015-04-28 15:10:47,619 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #70 2015-04-28 15:10:47,619 DEBUG [ContainerLauncher #6] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 10ms 2015-04-28 15:10:47,620 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:47,620 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 2 2015-04-28 15:10:47,620 INFO [ContainerLauncher #6] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000014_0 : 13562 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:47,620 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000014_0] using containerId: [container_1430213948957_0001_01_000015 on NM: [host-IP143:64318] 2015-04-28 15:10:47,620 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000014_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:47,620 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:47,620 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:47,621 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000014 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:47,621 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000014 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:47,621 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=24, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=88576 2015-04-28 15:10:47,621 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:47,621 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #69 2015-04-28 15:10:47,621 DEBUG [ContainerLauncher #5] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 14ms 2015-04-28 15:10:47,621 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:47,621 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:47,622 INFO [ContainerLauncher #5] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000013_0 : 13562 2015-04-28 15:10:47,622 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:47,623 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000013_0] using containerId: [container_1430213948957_0001_01_000016 on NM: [host-IP117:64318] 2015-04-28 15:10:47,623 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000013_0 TaskAttempt 
Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:47,623 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:47,623 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000013 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:47,623 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000013 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:47,623 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:47,992 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #161 2015-04-28 15:10:47,993 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#161 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:47,993 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,993 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:47,993 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#161 Retry#0 2015-04-28 15:10:47,993 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#161 Retry#0 Wrote 99 bytes. 
2015-04-28 15:10:47,995 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #162 2015-04-28 15:10:47,995 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#162 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:47,995 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,996 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:47,996 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#162 Retry#0 2015-04-28 15:10:47,996 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#162 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:47,998 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #163 2015-04-28 15:10:47,998 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#163 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:47,998 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:47,999 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:47,999 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#163 Retry#0 2015-04-28 15:10:47,999 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#163 Retry#0 Wrote 267 bytes. 
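The Call#158 through Call#163 traffic on port 48332 is the job client polling the AM roughly once a second: a getTaskAttemptCompletionEvents call to pick up newly finished attempts, followed by getJobReport for the overall status. A hypothetical sketch of that polling loop follows; the MRClient interface here is an invented stand-in, not the real MRClientProtocol:

import java.util.List;

// Hypothetical sketch of the once-per-second client polling: new task
// completion events are fetched from a running offset, then the job report is
// requested. "MRClient" is an invented stand-in, not the real MRClientProtocol.
public class PollLoop {
    interface MRClient {
        String getJobReport();
        List<String> getTaskAttemptCompletionEvents(int fromEventId, int maxEvents);
    }

    static void poll(MRClient client, int rounds) throws InterruptedException {
        int nextEventId = 0;
        for (int i = 0; i < rounds; i++) {
            List<String> events = client.getTaskAttemptCompletionEvents(nextEventId, 10);
            nextEventId += events.size();             // resume after the last event seen
            System.out.println(client.getJobReport() + " (new completion events: " + events.size() + ")");
            Thread.sleep(1000);                       // the ~1 s cadence in the log timestamps
        }
    }

    public static void main(String[] args) throws InterruptedException {
        MRClient canned = new MRClient() {            // canned data instead of a live AM
            public String getJobReport() { return "job_1430213948957_0001 RUNNING, completed tasks: 12"; }
            public List<String> getTaskAttemptCompletionEvents(int from, int max) { return List.of(); }
        };
        poll(canned, 3);
    }
}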
2015-04-28 15:10:48,011 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16356; # active connections: 1; # queued calls: 0 2015-04-28 15:10:48,131 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:48,131 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:48,131 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"Ff6B2IEjVcwjSeB41kkYHJl4alcR9N3h0sB1am/F\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:48,131 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16356 Call#-33 Retry#-1 2015-04-28 15:10:48,131 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16356 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:48,247 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:48,247 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:48,248 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=551b97cd8dbd04241ac7e54f88851ef2" 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16356 Call#-33 Retry#-1 2015-04-28 15:10:48,248 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16356 Call#-33 Retry#-1 Wrote 64 bytes. 
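Both halves of the token-based SASL handshake appear in this stretch: the ContainerLauncher threads act as DIGEST-MD5 clients toward the NodeManagers (NEGOTIATE, then INITIATE with the token identifier as the username, then SUCCESS carrying an rspauth), and here the AM's umbilical server on port 21207 plays the server role for the newly started task JVM, issuing the challenge and confirming "Auth successful for job_1430213948957_0001". The same exchange can be reproduced with the JDK's javax.security.sasl API; in the sketch below client and server run in one process, and the protocol name, server name and shared secret are made up for illustration:

import java.util.Map;
import javax.security.auth.callback.*;
import javax.security.sasl.*;

// In-process DIGEST-MD5 handshake with the JDK SASL API, mirroring the
// NEGOTIATE -> INITIATE -> SUCCESS states in the log. The username stands in
// for the token identifier and the password for the token secret.
public class DigestMd5Demo {
    static final String USER = "job_1430213948957_0001";
    static final char[] SECRET = "not-the-real-token-secret".toCharArray();

    public static void main(String[] args) throws Exception {
        Map<String, String> props = Map.of(Sasl.QOP, "auth");   // matches "Negotiated QOP is :auth"

        CallbackHandler clientCb = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof NameCallback nc) nc.setName(USER);                   // "setting username"
                else if (cb instanceof PasswordCallback pc) pc.setPassword(SECRET);    // "setting userPassword"
                else if (cb instanceof RealmCallback rc)                                // "setting realm"
                    rc.setText(rc.getDefaultText() != null ? rc.getDefaultText() : "default");
            }
        };
        CallbackHandler serverCb = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof NameCallback nc) nc.setName(nc.getDefaultName());
                else if (cb instanceof PasswordCallback pc) pc.setPassword(SECRET);     // "setting password for client"
                else if (cb instanceof RealmCallback rc)
                    rc.setText(rc.getDefaultText() != null ? rc.getDefaultText() : "default");
                else if (cb instanceof AuthorizeCallback ac) ac.setAuthorized(true);    // authorize the client ID
            }
        };

        SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "hadoop", "default", props, serverCb);
        SaslClient client = Sasl.createSaslClient(new String[]{"DIGEST-MD5"}, null,
                "hadoop", "default", props, clientCb);

        byte[] challenge = server.evaluateResponse(new byte[0]);   // NEGOTIATE: realm, nonce, qop
        byte[] response  = client.evaluateChallenge(challenge);    // INITIATE: digest over user/secret
        byte[] rspauth   = server.evaluateResponse(response);      // SUCCESS: server verifies, returns rspauth
        client.evaluateChallenge(rspauth);                         // client checks the rspauth

        System.out.println("Auth successful for " + server.getAuthorizationID()
                + " (negotiated QOP " + server.getNegotiatedProperty(Sasl.QOP) + ")");
        client.dispose();
        server.dispose();
    }
}

In the real exchange the two sides sit on opposite ends of the IPC connection and the secret is the NMToken or job token secret, never a literal string.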
2015-04-28 15:10:48,263 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:48,263 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:48,263 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:48,263 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@267f2209), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:48,264 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:48,264 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000013 asked for a task 2015-04-28 15:10:48,264 INFO [IPC Server handler 4 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000013 given task: attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:48,264 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:48,272 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@267f2209), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#0 Retry#0 2015-04-28 15:10:48,272 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@267f2209), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#0 Retry#0 Wrote 366 bytes. 
2015-04-28 15:10:48,496 DEBUG [IPC Server idle connection scanner for port 48332] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 48332: task running 2015-04-28 15:10:48,602 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #71 2015-04-28 15:10:48,611 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #71 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 10ms 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 0 newContainers=2 finishedContainers=2 resourcelimit= knownNMs=2 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000017, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000018, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000012 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000014 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 2 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000017 with priority 20 to NM host-IP117:64318 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000009_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000018 with priority 20 to NM host-IP143:64318 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing 
attempt_1430213948957_0001_m_000009_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Host matched to the request list host-IP117 2015-04-28 15:10:48,612 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000009_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=1 #asks=0 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:48,612 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000011_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP143 numContainers=0 #asks=1 2015-04-28 15:10:48,612 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000011_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=1 #asks=1 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=host-IP117 numContainers=0 #asks=2 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=1 #asks=2 2015-04-28 15:10:48,612 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=/default-rack numContainers=0 #asks=3 2015-04-28 15:10:48,612 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=1 #asks=3 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=20 resourceName=* numContainers=0 #asks=4 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000017 to attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:48,613 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000017, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_m_000015_0 on node host-IP117:64318 2015-04-28 15:10:48,613 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_ASSIGNED 2015-04-28 15:10:48,613 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned based on host match host-IP117 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Releasing unassigned container Container: [ContainerId: container_1430213948957_0001_01_000018, NodeId: host-IP143:64318, NodeHttpAddress: host-IP143:64320, Resource: , Priority: 20, Token: Token { kind: ContainerToken, service: IP143:64318 }, ] 2015-04-28 15:10:48,613 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:48,613 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:4 AssignedReds:0 CompletedMaps:12 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:48,613 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:48,614 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000015_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:48,614 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:48,614 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:48,614 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000017 taskAttempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:48,614 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:48,614 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000017 taskAttempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:48,614 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:48,614 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:48,614 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:48,614 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:48,614 DEBUG [ContainerLauncher #7] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:48,615 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:48,615 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 
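Two details in the entries above are worth noting. Exit code 143 in the diagnostics is 128 + 15, i.e. the container JVM ended on SIGTERM; together with "Container killed by the ApplicationMaster. Container killed on request." this usually reflects the AM's own cleanup of a container whose attempt has already finished rather than a task failure. Separately, "Reduce slow start threshold not met. completedMapsForReduceSlowstart 16" alongside CompletedMaps:12 means the single pending reduce stays unscheduled until 16 maps have completed; the threshold is conventionally the slow-start fraction (mapreduce.job.reduce.slowstart.completedmaps) times the total map count, rounded up, and CompletedMaps:12 plus AssignedMaps:4 suggests 16 maps in total, which would be consistent with a fraction of 1.0. The snippet below is only that arithmetic; the fraction value and the rounding are assumptions of mine, not read from this log.

    public class SlowStartThreshold {
        public static void main(String[] args) {
            int totalMaps = 16;       // CompletedMaps(12) + AssignedMaps(4) reported above
            double slowStart = 1.0;   // mapreduce.job.reduce.slowstart.completedmaps (assumed value)
            // Assumption: threshold = ceil(fraction * total maps).
            int threshold = (int) Math.ceil(slowStart * totalMaps);
            System.out.println(threshold);  // prints 16, matching "completedMapsForReduceSlowstart 16"
        }
    }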
2015-04-28 15:10:48,615 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:48,616 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:48,616 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:48,618 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"WWf+XWVfGsM22vLiWuMyJwdfFokVtCf1kCc/gO45\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:48,618 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@729e0aad 2015-04-28 15:10:48,618 INFO [ContainerLauncher #7] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:48,618 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:48,618 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:48,619 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:48,619 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:48,619 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:48,619 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"WWf+XWVfGsM22vLiWuMyJwdfFokVtCf1kCc/gO45\",nc=00000001,cnonce=\"aNHtS+WOhKS+lQw/18PcBFztIxlg+Uc6MD0M9689\",digest-uri=\"/default\",maxbuf=65536,response=24b2a8bfa7fc63d7251b3f11400f74ac,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:48,622 DEBUG [ContainerLauncher #7] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=af6178a345e3339743ebcfa476b3b0eb" 2015-04-28 15:10:48,622 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:48,622 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:48,623 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) 
connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #72 2015-04-28 15:10:48,633 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #72 2015-04-28 15:10:48,633 DEBUG [ContainerLauncher #7] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 18ms 2015-04-28 15:10:48,633 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:48,633 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:48,634 INFO [ContainerLauncher #7] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_m_000015_0 : 13562 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:48,634 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_m_000015_0] using containerId: [container_1430213948957_0001_01_000017 on NM: [host-IP117:64318] 2015-04-28 15:10:48,634 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000015_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_STARTED 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:48,634 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:48,634 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000015 of 
type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:48,634 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000015 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:48,635 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_STARTED 2015-04-28 15:10:48,908 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP143:63593; # active connections: 2; # queued calls: 0 2015-04-28 15:10:48,979 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:48,979 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:48,979 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"+89YSdJ7vUQIAacrmiT6Y3q9pNFJXFpUYklpmaEe\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:48,979 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63593 Call#-33 Retry#-1 2015-04-28 15:10:48,979 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63593 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:49,001 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #164 2015-04-28 15:10:49,001 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#164 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:49,002 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:49,002 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:49,002 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#164 Retry#0 2015-04-28 15:10:49,002 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#164 Retry#0 Wrote 33 bytes. 
2015-04-28 15:10:49,004 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #165 2015-04-28 15:10:49,004 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#165 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:49,004 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:49,004 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:49,004 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#165 Retry#0 2015-04-28 15:10:49,005 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#165 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:49,006 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #166 2015-04-28 15:10:49,006 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#166 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:49,007 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:49,007 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:49,007 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#166 Retry#0 2015-04-28 15:10:49,007 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#166 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,057 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=ffe167031b7febc65c2717311a3e7ee4" 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63593 Call#-33 Retry#-1 2015-04-28 15:10:49,057 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP143:63593 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:49,068 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:49,068 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:49,068 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:49,068 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@23204108), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:49,069 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:49,069 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000015 asked for a task 2015-04-28 15:10:49,069 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000015 given task: attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:49,069 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:49,070 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@23204108), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#0 Retry#0 2015-04-28 15:10:49,070 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@23204108), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#0 Retry#0 Wrote 366 bytes. 
2015-04-28 15:10:49,165 DEBUG [IPC Server idle connection scanner for port 21207] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 21207: task running 2015-04-28 15:10:49,614 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #73 2015-04-28 15:10:49,628 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #73 2015-04-28 15:10:49,628 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 15ms 2015-04-28 15:10:49,628 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=4 release= 1 newContainers=0 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:49,628 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:49,628 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000018 2015-04-28 15:10:49,628 ERROR [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Container complete event for unknown container id container_1430213948957_0001_01_000018 2015-04-28 15:10:49,629 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:49,629 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:49,656 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16357; # active connections: 3; # queued calls: 0 2015-04-28 15:10:49,788 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:49,788 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:49,788 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"7i4AEucj0aKRkJzTbP4Q353/jN6xE6qmUjjvM9q4\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:49,788 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16357 Call#-33 Retry#-1 2015-04-28 15:10:49,789 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16357 Call#-33 Retry#-1 Wrote 178 bytes. 
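The ERROR above ("Container complete event for unknown container id container_1430213948957_0001_01_000018") follows from the earlier "Releasing unassigned container ..._000018" entry: the allocator handed that container back without ever assigning it to an attempt, so when the ResourceManager later reports it as completed there is no record left to update. In this sequence the message appears to be harmless bookkeeping noise rather than a lost task. The following toy sketch of that bookkeeping is my own simplification, not the RMContainerAllocator code.

    import java.util.HashMap;
    import java.util.Map;

    public class ContainerBookkeepingSketch {
        // container id -> task attempt id, for containers the allocator is still tracking
        private final Map<String, String> assigned = new HashMap<>();

        void onAllocated(String containerId, String attemptId) {
            if (attemptId == null) {
                // No matching request: give the container straight back to the RM.
                System.out.println("Releasing unassigned container " + containerId);
            } else {
                assigned.put(containerId, attemptId);
            }
        }

        void onCompleted(String containerId) {
            String attemptId = assigned.remove(containerId);
            if (attemptId == null) {
                // The situation in the log: a completion report for a container that was
                // released (or never tracked), so there is nothing left to update.
                System.out.println("Container complete event for unknown container id " + containerId);
            } else {
                System.out.println(containerId + " completed for " + attemptId);
            }
        }

        public static void main(String[] args) {
            ContainerBookkeepingSketch allocator = new ContainerBookkeepingSketch();
            allocator.onAllocated("container_1430213948957_0001_01_000017", "attempt_1430213948957_0001_m_000015_0");
            allocator.onAllocated("container_1430213948957_0001_01_000018", null); // released, never assigned
            allocator.onCompleted("container_1430213948957_0001_01_000018");       // -> "unknown container id"
        }
    }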
2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. Negotiated QoP is auth 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,963 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:49,963 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=17c10aec082ba643e14f665df39c67ab" 2015-04-28 15:10:49,964 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16357 Call#-33 Retry#-1 2015-04-28 15:10:49,964 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16357 Call#-33 Retry#-1 Wrote 64 bytes. 
2015-04-28 15:10:49,993 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:49,994 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:49,994 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:49,995 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@2de7bb1c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:49,995 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:49,995 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000016 asked for a task 2015-04-28 15:10:49,995 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000016 given task: attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:49,995 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 0 procesingTime= 0 2015-04-28 15:10:49,996 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@2de7bb1c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#0 Retry#0 2015-04-28 15:10:49,996 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@2de7bb1c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#0 Retry#0 Wrote 366 bytes. 2015-04-28 15:10:50,012 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #167 2015-04-28 15:10:50,012 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#167 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:50,013 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,013 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:50,014 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#167 Retry#0 2015-04-28 15:10:50,014 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#167 Retry#0 Wrote 33 bytes. 
2015-04-28 15:10:50,015 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #168 2015-04-28 15:10:50,016 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#168 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:50,016 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,016 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:50,016 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#168 Retry#0 2015-04-28 15:10:50,017 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#168 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:50,019 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #169 2015-04-28 15:10:50,019 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#169 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:50,019 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,019 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:50,020 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#169 Retry#0 2015-04-28 15:10:50,020 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#169 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:50,436 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:50,436 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@53b84c35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,436 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,437 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000014_0 is : 0.0 2015-04-28 15:10:50,437 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 0 2015-04-28 15:10:50,437 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:50,438 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@53b84c35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#5 Retry#0 2015-04-28 15:10:50,438 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@53b84c35), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#5 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:50,438 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_UPDATE 2015-04-28 15:10:50,439 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,497 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:50,497 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@44ecf6ae), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,497 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,498 INFO [IPC Server handler 10 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000014_0 is : 1.0 2015-04-28 15:10:50,499 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 1 2015-04-28 15:10:50,499 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:50,499 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@44ecf6ae), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#7 Retry#0 2015-04-28 15:10:50,499 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000014_0, org.apache.hadoop.mapred.MapTaskStatus@44ecf6ae), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#7 Retry#0 Wrote 41 bytes. 
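The two statusUpdate entries above show attempt m_000014_0 jumping from progress 0.0 to 1.0 in about 60 ms before acknowledging completion with done() below. When scanning AM syslogs like this one, it can help to pull those progress reports out programmatically; the helper below is hypothetical (the class name, regex, and sample lines are mine) and not part of Hadoop.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ProgressGrep {
        private static final Pattern P = Pattern.compile(
                "Progress of TaskAttempt (attempt_\\S+) is : ([0-9.]+)");

        public static void main(String[] args) {
            String[] sample = {
                "... TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000014_0 is : 0.0",
                "... TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000014_0 is : 1.0"
            };
            for (String line : sample) {
                Matcher m = P.matcher(line);
                if (m.find()) {
                    System.out.println(m.group(1) + " -> " + m.group(2));
                }
            }
        }
    }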
2015-04-28 15:10:50,499 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_UPDATE 2015-04-28 15:10:50,499 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,500 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:50,500 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: done(attempt_1430213948957_0001_m_000014_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,500 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,500 INFO [IPC Server handler 15 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:50,500 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 0 2015-04-28 15:10:50,500 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:50,501 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to done(attempt_1430213948957_0001_m_000014_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#8 Retry#0 2015-04-28 15:10:50,501 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: responding to done(attempt_1430213948957_0001_m_000014_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP143:63593 Call#8 Retry#0 Wrote 118 bytes. 
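The done() acknowledgement above becomes a TA_DONE event, and the entries that follow show attempt m_000014_0 moving from RUNNING to SUCCESS_CONTAINER_CLEANUP (while ContainerLauncher #8 stops its container) and then to SUCCEEDED, mirroring the earlier UNASSIGNED to ASSIGNED to RUNNING transitions for attempt m_000015_0. The toy state machine below models only the transitions visible in this log; it is a sketch of my own, not the real TaskAttemptImpl transition table.

    public class AttemptLifecycleSketch {
        enum State { UNASSIGNED, ASSIGNED, RUNNING, SUCCESS_CONTAINER_CLEANUP, SUCCEEDED }
        enum Event { TA_ASSIGNED, TA_CONTAINER_LAUNCHED, TA_DONE, TA_CONTAINER_CLEANED }

        static State next(State s, Event e) {
            // Only the transitions actually observed in this log are modelled.
            if (s == State.UNASSIGNED && e == Event.TA_ASSIGNED) return State.ASSIGNED;
            if (s == State.ASSIGNED && e == Event.TA_CONTAINER_LAUNCHED) return State.RUNNING;
            if (s == State.RUNNING && e == Event.TA_DONE) return State.SUCCESS_CONTAINER_CLEANUP;
            if (s == State.SUCCESS_CONTAINER_CLEANUP && e == Event.TA_CONTAINER_CLEANED) return State.SUCCEEDED;
            throw new IllegalStateException("no transition for " + e + " in " + s);
        }

        public static void main(String[] args) {
            State s = State.UNASSIGNED;
            for (Event e : new Event[] { Event.TA_ASSIGNED, Event.TA_CONTAINER_LAUNCHED,
                                         Event.TA_DONE, Event.TA_CONTAINER_CLEANED }) {
                s = next(s, e);
                System.out.println(e + " -> " + s);
            }
        }
    }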
2015-04-28 15:10:50,501 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_DONE 2015-04-28 15:10:50,501 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000014_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:50,501 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000015 taskAttempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:50,501 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000015 taskAttempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:50,501 INFO [ContainerLauncher #8] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:50,502 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP143:64318 2015-04-28 15:10:50,502 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:50,502 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:50,502 DEBUG [ContainerLauncher #8] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:50,502 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:50,503 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:50,503 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: Connecting to host-IP143/IP143:64318 2015-04-28 15:10:50,503 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:50,504 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:50,504 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP143:63593. 
Number of active connections: 2 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"mHidLp5AtzAsbnmlFZMjEkG3M941xWpB5RV7/ahh\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@1e5ddde6 2015-04-28 15:10:50,505 INFO [ContainerLauncher #8] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP143:64318. Current token is Kind: NMToken, Service: IP143:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP143" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:50,505 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:50,506 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTQzEL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"mHidLp5AtzAsbnmlFZMjEkG3M941xWpB5RV7/ahh\",nc=00000001,cnonce=\"7KZ4TIYH0AC9e/iCQqsSpn9VQSAoBCXc73FhKFxg\",digest-uri=\"/default\",maxbuf=65536,response=ad26e2569c4657a0b2e1378e01a651f4,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:50,508 DEBUG [ContainerLauncher #8] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=0ff1c1932c4f238c331a19c9bc408610" 2015-04-28 15:10:50,508 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:50,512 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:50,512 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 sending #74 2015-04-28 15:10:50,517 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001 got value #74 2015-04-28 15:10:50,517 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from 
appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:50,517 DEBUG [IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP143/IP143:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:50,518 DEBUG [ContainerLauncher #8] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 15ms 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:50,518 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000014_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:50,518 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000014 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:50,518 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000014_0 2015-04-28 15:10:50,518 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000014 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,518 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:50,519 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:50,519 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: 
Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:50,519 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:50,519 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 13 2015-04-28 15:10:50,519 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:50,520 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:50,520 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=93312 lastFlushOffset=88875 createNewBlock=false 2015-04-28 15:10:50,520 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 24 2015-04-28 15:10:50,520 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 24 2015-04-28 15:10:50,521 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 24 offsetInBlock: 88576 lastPacketInBlock: false lastByteOffsetInBlock: 93312 2015-04-28 15:10:50,523 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 24 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1507096 flag: 0 flag: 0 2015-04-28 15:10:50,523 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:50,523 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:50,524 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=25, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=93184 2015-04-28 15:10:50,524 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:50,524 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=96020 lastFlushOffset=93312 createNewBlock=false 2015-04-28 15:10:50,524 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 25 2015-04-28 15:10:50,524 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 25 2015-04-28 15:10:50,524 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 25 offsetInBlock: 93184 
lastPacketInBlock: false lastByteOffsetInBlock: 96020 2015-04-28 15:10:50,534 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 25 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 3506235 flag: 0 flag: 0 2015-04-28 15:10:50,535 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:50,629 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:4 AssignedReds:0 CompletedMaps:13 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:50,629 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #75 2015-04-28 15:10:50,632 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #75 2015-04-28 15:10:50,632 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:50,633 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:50,633 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:50,765 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:50,766 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: statusUpdate(attempt_1430213948957_0001_m_000012_0, org.apache.hadoop.mapred.MapTaskStatus@173c639d), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,766 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,766 INFO [IPC Server handler 17 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000012_0 is : 0.0 2015-04-28 15:10:50,767 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:50,767 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:50,768 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000012_0, org.apache.hadoop.mapred.MapTaskStatus@173c639d), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#5 Retry#0 2015-04-28 15:10:50,769 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_UPDATE 2015-04-28 15:10:50,769 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000012_0, 
org.apache.hadoop.mapred.MapTaskStatus@173c639d), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#5 Retry#0 Wrote 41 bytes. 2015-04-28 15:10:50,769 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,933 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:50,933 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: statusUpdate(attempt_1430213948957_0001_m_000012_0, org.apache.hadoop.mapred.MapTaskStatus@29d82a78), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,934 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,934 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000012_0 is : 1.0 2015-04-28 15:10:50,935 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 1 2015-04-28 15:10:50,935 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:50,935 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000012_0, org.apache.hadoop.mapred.MapTaskStatus@29d82a78), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#7 Retry#0 2015-04-28 15:10:50,935 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_UPDATE 2015-04-28 15:10:50,935 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000012_0, org.apache.hadoop.mapred.MapTaskStatus@29d82a78), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#7 Retry#0 Wrote 41 bytes. 
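The RMContainerAllocator entries a little earlier report "Reduce slow start threshold not met. completedMapsForReduceSlowstart 16": reducers are held back until a configured fraction of the job's maps has completed. A minimal sketch of that gating arithmetic, assuming the usual ceil(fraction * totalMaps) form behind mapreduce.job.reduce.slowstart.completedmaps (commonly 0.05 by default); the values below are illustrative, not read from this job's configuration.

public class ReduceSlowStartSketch {
    public static void main(String[] args) {
        // Hypothetical values for illustration; this trace's logged threshold is 16.
        int totalMaps = 16;
        double slowStartFraction = 1.0;   // mapreduce.job.reduce.slowstart.completedmaps
        int completedMapsForReduceSlowstart = (int) Math.ceil(slowStartFraction * totalMaps);

        int completedMaps = 13;           // e.g. the "CompletedMaps:13" seen above
        if (completedMaps < completedMapsForReduceSlowstart) {
            System.out.println("Reduce slow start threshold not met."
                    + " completedMapsForReduceSlowstart " + completedMapsForReduceSlowstart);
        } else {
            System.out.println("schedule reducers");
        }
    }
}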
2015-04-28 15:10:50,935 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,937 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:50,937 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: done(attempt_1430213948957_0001_m_000012_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:50,937 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:50,938 INFO [IPC Server handler 22 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:50,938 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:50,938 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:50,938 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_DONE 2015-04-28 15:10:50,938 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: responding to done(attempt_1430213948957_0001_m_000012_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#8 Retry#0 2015-04-28 15:10:50,938 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000012_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:50,938 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: responding to done(attempt_1430213948957_0001_m_000012_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16356 Call#8 Retry#0 Wrote 118 bytes. 
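The statusUpdate/done exchange above is the task-to-AM "umbilical" heartbeat: the child JVM for attempt_1430213948957_0001_m_000012_0 reports progress 0.0 and then 1.0, and finally calls done(), which drives the TA_DONE transition. Below is a simplified, hypothetical stand-in for that reporting loop; TaskReporter is an illustrative interface, not Hadoop's actual TaskUmbilicalProtocol, whose exact signatures vary across versions.

// Hypothetical, simplified stand-in for the task-side reporting loop.
interface TaskReporter {
    void statusUpdate(String attemptId, float progress);  // like the statusUpdate() calls above
    void done(String attemptId);                           // like the final done() acknowledgement
}

class ReportingTask {
    private final TaskReporter reporter;
    ReportingTask(TaskReporter reporter) { this.reporter = reporter; }

    void run(String attemptId, int totalRecords) {
        for (int i = 0; i < totalRecords; i++) {
            // ... process record i ...
            if (i % 1000 == 0) {
                reporter.statusUpdate(attemptId, (float) i / totalRecords);
            }
        }
        reporter.statusUpdate(attemptId, 1.0f);  // "Progress of TaskAttempt ... is : 1.0"
        reporter.done(attemptId);                // "Done acknowledgement from ..."
    }

    public static void main(String[] args) {
        TaskReporter logOnly = new TaskReporter() {
            public void statusUpdate(String id, float p) { System.out.println("Progress of " + id + " is : " + p); }
            public void done(String id) { System.out.println("Done acknowledgement from " + id); }
        };
        new ReportingTask(logOnly).run("attempt_1430213948957_0001_m_000012_0", 5000);
    }
}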
2015-04-28 15:10:50,938 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000013 taskAttempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:50,938 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000013 taskAttempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:50,939 INFO [ContainerLauncher #9] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:50,939 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:50,939 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:50,940 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:50,940 DEBUG [ContainerLauncher #9] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:50,940 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:50,941 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:50,941 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:50,941 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16356. Number of active connections: 1 2015-04-28 15:10:50,942 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:50,942 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:50,944 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"20EDZ+jkILsD7aIlqLXX1TIbjf1tyonltC1EUZ14\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:50,944 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@113f7351 2015-04-28 15:10:50,944 INFO [ContainerLauncher #9] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:50,945 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:50,945 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:50,945 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:50,945 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:50,945 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:50,946 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"20EDZ+jkILsD7aIlqLXX1TIbjf1tyonltC1EUZ14\",nc=00000001,cnonce=\"V2UZG8ltznz6PrtaojlKYGI5GjXNMIIwwpT771VY\",digest-uri=\"/default\",maxbuf=65536,response=a02e69909317be89bb1c98e98e9b29a7,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:50,953 DEBUG [ContainerLauncher #9] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=10f42dc04ffd22a08c8e907dffd05edd" 2015-04-28 15:10:50,953 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:50,953 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:50,954 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #76 2015-04-28 15:10:50,959 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #76 2015-04-28 15:10:50,959 DEBUG [ContainerLauncher #9] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 19ms 2015-04-28 15:10:50,959 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:50,959 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:50,960 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000012_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:50,960 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:50,960 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000012 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:50,961 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000012_0 2015-04-28 15:10:50,961 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000012 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:50,961 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:50,961 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:50,961 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:50,961 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:50,962 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:50,962 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 14 2015-04-28 15:10:50,962 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:50,962 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient 
writeChunk allocating new packet seqno=26, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=95744 2015-04-28 15:10:50,962 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:50,962 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=99179 lastFlushOffset=96020 createNewBlock=false 2015-04-28 15:10:50,962 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 26 2015-04-28 15:10:50,962 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 26 2015-04-28 15:10:50,963 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 26 offsetInBlock: 95744 lastPacketInBlock: false lastByteOffsetInBlock: 99179 2015-04-28 15:10:50,976 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 26 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 12128864 flag: 0 flag: 0 2015-04-28 15:10:50,976 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:50,976 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:50,977 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=27, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=98816 2015-04-28 15:10:50,977 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:50,977 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=101888 lastFlushOffset=99179 createNewBlock=false 2015-04-28 15:10:50,977 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 27 2015-04-28 15:10:50,977 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 27 2015-04-28 15:10:50,977 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 27 offsetInBlock: 98816 lastPacketInBlock: false lastByteOffsetInBlock: 101888 2015-04-28 15:10:50,980 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 27 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1529867 flag: 0 flag: 0 2015-04-28 15:10:50,980 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 
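The DFSClient entries above show the job-history writer flushing the .jhist file after each event: a packet is queued, sent down the datanode pipeline, and the writer blocks until it is acknowledged. A minimal sketch of the same durability call through the public HDFS API; the cluster URI and output path here are chosen for illustration only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.nio.charset.StandardCharsets;

public class HistoryFlushSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative URI/path; the trace writes to hdfs://hacluster:8020/staging-dir/.../job_..._1.jhist.
        FileSystem fs = FileSystem.get(URI.create("hdfs://hacluster:8020"), conf);
        Path history = new Path("/tmp/example.jhist");

        try (FSDataOutputStream out = fs.create(history, true)) {
            out.write("one serialized history event\n".getBytes(StandardCharsets.UTF_8));
            // hflush() pushes the buffered packet to the pipeline and waits for acks,
            // like the "Queued packet" / "Waiting for ack" lines above; hsync() would
            // additionally force the data to disk on the datanodes.
            out.hflush();
        }
        fs.close();
    }
}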
2015-04-28 15:10:51,028 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #170 2015-04-28 15:10:51,029 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#170 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:51,029 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:51,029 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:51,029 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#170 Retry#0 2015-04-28 15:10:51,029 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#170 Retry#0 Wrote 166 bytes. 2015-04-28 15:10:51,031 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #171 2015-04-28 15:10:51,031 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#171 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:51,032 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:51,032 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:51,032 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#171 Retry#0 2015-04-28 15:10:51,032 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#171 Retry#0 Wrote 267 bytes. 
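The getTaskAttemptCompletionEvents and getJobReport calls above are the submitting client (user dsperf, auth:SIMPLE) polling this AM over MRClientProtocol while the job runs. A rough sketch of equivalent polling from the public MapReduce client API, assuming the job handle is obtained from a Cluster; the job id is the one from this trace, used purely as a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;

public class JobPollSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Cluster cluster = new Cluster(conf);
        Job job = cluster.getJob(JobID.forName("job_1430213948957_0001"));

        int eventOffset = 0;
        while (job != null && !job.isComplete()) {
            // Mirrors the repeated getTaskAttemptCompletionEvents / getJobReport calls above.
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(eventOffset, 10);
            eventOffset += events.length;
            System.out.printf("map %.0f%% reduce %.0f%%%n",
                    job.mapProgress() * 100, job.reduceProgress() * 100);
            Thread.sleep(1000);
        }
    }
}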
2015-04-28 15:10:51,041 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #172 2015-04-28 15:10:51,041 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#172 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:51,041 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:51,041 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:51,041 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#172 Retry#0 2015-04-28 15:10:51,041 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#172 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:51,386 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16360; # active connections: 2; # queued calls: 0 2015-04-28 15:10:51,501 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:51,501 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:51,502 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"PV+5TyVeB1TfE09BJN30c9yMjJ2UjY/U0XAzkCUl\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:51,502 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16360 Call#-33 Retry#-1 2015-04-28 15:10:51,502 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16360 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:51,627 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:51,627 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
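The entries just above show the server half of the DIGEST-MD5 handshake: a SaslServer is created for the new connection from IP117:16360, the client response is fed to evaluateResponse(), and the server callbacks supply the token-derived password for job_1430213948957_0001. As a rough illustration with the standard javax.security.sasl API only (this is not Hadoop's SaslRpcServer, and the secret below is a placeholder):

import javax.security.auth.callback.*;
import javax.security.sasl.*;
import java.util.Collections;

public class DigestMd5ServerSketch {
    public static void main(String[] args) throws Exception {
        CallbackHandler handler = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof NameCallback) {
                    ((NameCallback) cb).getDefaultName();   // token identifier sent as the username
                } else if (cb instanceof PasswordCallback) {
                    // Placeholder: Hadoop derives this from the job/NM token secret
                    // ("setting password for client: job_..." above).
                    ((PasswordCallback) cb).setPassword("token-derived-secret".toCharArray());
                } else if (cb instanceof RealmCallback) {
                    RealmCallback rc = (RealmCallback) cb;
                    rc.setText(rc.getDefaultText());
                } else if (cb instanceof AuthorizeCallback) {
                    AuthorizeCallback ac = (AuthorizeCallback) cb;
                    ac.setAuthorized(ac.getAuthenticationID().equals(ac.getAuthorizationID()));
                } else {
                    throw new UnsupportedCallbackException(cb);
                }
            }
        };

        // protocol "" and serverId "default" mirror the negotiated auth entry in the trace.
        SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "", "default",
                Collections.singletonMap(Sasl.QOP, "auth"), handler);
        // The first call with empty input produces the NEGOTIATE-style challenge seen above;
        // the client's INITIATE token would then be passed back into evaluateResponse().
        byte[] challenge = server.evaluateResponse(new byte[0]);
        System.out.println("challenge bytes: " + challenge.length);
    }
}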
Negotiated QoP is auth 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:51,628 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=8e09962a14b2bf3419d0ce18a15408ab" 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16360 Call#-33 Retry#-1 2015-04-28 15:10:51,628 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16360 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:51,633 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:4 AssignedReds:0 CompletedMaps:14 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:51,633 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #77 2015-04-28 15:10:51,636 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #77 2015-04-28 15:10:51,636 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:51,636 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:51,637 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000015 2015-04-28 15:10:51,637 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:51,637 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:51,637 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. 
completedMapsForReduceSlowstart 16 2015-04-28 15:10:51,637 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:51,637 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:3 AssignedReds:0 CompletedMaps:14 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:51,637 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:51,637 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000014_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:51,637 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000014_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:51,645 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:51,645 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:51,645 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:51,646 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@1303e5a2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:51,646 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:51,647 INFO [IPC Server handler 17 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_m_000017 asked for a task 2015-04-28 15:10:51,647 INFO [IPC Server handler 17 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_m_000017 given task: attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:51,647 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 0 2015-04-28 15:10:51,648 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@1303e5a2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#0 Retry#0 2015-04-28 15:10:51,649 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@1303e5a2), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#0 Retry#0 Wrote 366 bytes. 
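Earlier in these entries the RMCommunicator Allocator heartbeat returns a newly completed container (container_1430213948957_0001_01_000015) whose diagnostics report exit code 143, i.e. 128 + SIGTERM, because the AM itself asked the NodeManager to kill the container after its map succeeded. A rough sketch of such a heartbeat loop using the public AMRMClient API; the registration arguments and progress value are placeholders, not taken from this job.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.client.api.AMRMClient;

public class AllocateHeartbeatSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        AMRMClient<AMRMClient.ContainerRequest> amrm = AMRMClient.createAMRMClient();
        amrm.init(conf);
        amrm.start();
        // Placeholder registration; a real AM passes its RPC host/port and tracking URL.
        amrm.registerApplicationMaster("", 0, "");

        // One heartbeat, as in "Call: allocate took 3ms" above.
        AllocateResponse response = amrm.allocate(0.5f /* placeholder progress */);
        for (ContainerStatus status : response.getCompletedContainersStatuses()) {
            // "Received completed container ..." / "Exit code is 143"
            System.out.println(status.getContainerId() + " exited " + status.getExitStatus()
                    + ": " + status.getDiagnostics());
        }
        amrm.stop();
    }
}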
2015-04-28 15:10:52,057 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #173 2015-04-28 15:10:52,057 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#173 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:52,057 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,058 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:52,058 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#173 Retry#0 2015-04-28 15:10:52,058 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#173 Retry#0 Wrote 33 bytes. 2015-04-28 15:10:52,059 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #174 2015-04-28 15:10:52,060 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#174 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:52,060 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,060 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:52,060 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#174 Retry#0 2015-04-28 15:10:52,060 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#174 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:52,065 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #175 2015-04-28 15:10:52,065 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#175 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:52,065 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,065 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:52,066 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#175 Retry#0 2015-04-28 15:10:52,066 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#175 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:52,431 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:52,431 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@6a185272), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:52,432 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,433 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000013_0 is : 0.0 2015-04-28 15:10:52,433 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 1 2015-04-28 15:10:52,433 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:52,433 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_UPDATE 2015-04-28 15:10:52,433 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@6a185272), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#5 Retry#0 2015-04-28 15:10:52,433 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@6a185272), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#5 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:52,433 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:52,541 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:52,542 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@13c9f0aa), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:52,542 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,542 INFO [IPC Server handler 17 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000013_0 is : 1.0 2015-04-28 15:10:52,544 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 2 2015-04-28 15:10:52,544 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:52,544 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@13c9f0aa), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#7 Retry#0 2015-04-28 15:10:52,544 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_UPDATE 2015-04-28 15:10:52,544 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000013_0, org.apache.hadoop.mapred.MapTaskStatus@13c9f0aa), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#7 Retry#0 Wrote 41 bytes. 
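Most of the bookkeeping above funnels through the single "AsyncDispatcher event handler" thread: IPC handler threads enqueue typed events (TA_UPDATE, TA_DONE, SpeculatorEvent updates, and so on) and one dispatcher thread hands each event to the handler registered for its type. The following is a generic sketch of that queue-plus-handler-map pattern, not YARN's actual org.apache.hadoop.yarn.event.AsyncDispatcher.

import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;

public class MiniDispatcher {
    interface Event { String type(); }
    interface Handler { void handle(Event event); }

    private final BlockingQueue<Event> queue = new LinkedBlockingQueue<>();
    private final Map<String, Handler> handlers = new ConcurrentHashMap<>();

    void register(String type, Handler handler) { handlers.put(type, handler); }

    void dispatch(Event event) { queue.add(event); }   // called from IPC handler threads

    void start() {
        Thread t = new Thread(() -> {
            try {
                while (true) {
                    Event e = queue.take();             // single event-handling thread
                    Handler h = handlers.get(e.type());
                    if (h != null) h.handle(e);         // e.g. TaskAttemptImpl processing TA_UPDATE
                }
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
            }
        }, "AsyncDispatcher event handler");
        t.setDaemon(true);
        t.start();
    }

    public static void main(String[] args) throws Exception {
        MiniDispatcher d = new MiniDispatcher();
        d.register("TA_UPDATE", e -> System.out.println("Processing event of type " + e.type()));
        d.start();
        d.dispatch(() -> "TA_UPDATE");
        Thread.sleep(100);
    }
}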
2015-04-28 15:10:52,544 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:52,546 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:52,546 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: done(attempt_1430213948957_0001_m_000013_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:52,546 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:52,546 INFO [IPC Server handler 27 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:52,547 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 1 2015-04-28 15:10:52,547 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:52,547 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_DONE 2015-04-28 15:10:52,547 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: responding to done(attempt_1430213948957_0001_m_000013_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#8 Retry#0 2015-04-28 15:10:52,547 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000013_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:52,547 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000016 taskAttempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:52,547 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: responding to done(attempt_1430213948957_0001_m_000013_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16357 Call#8 Retry#0 Wrote 118 bytes. 
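After the done() acknowledgement, the AM dispatches CONTAINER_REMOTE_CLEANUP and, as the ContainerLauncher entries that follow show, authenticates to the NodeManager with the NMToken and issues stopContainers, which is why the map's container later reports exit code 143. A rough sketch of the same stop request through the public NMClient API; the container and node identifiers below mirror the trace but are placeholders, since a real AM would take them from the Container objects returned by allocate().

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.client.api.NMClient;

public class StopContainerSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        NMClient nm = NMClient.createNMClient();
        nm.init(conf);
        nm.start();

        // Placeholder ids mirroring container_1430213948957_0001_01_000016 on host-IP117:64318.
        ApplicationId appId = ApplicationId.newInstance(1430213948957L, 1);
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
        ContainerId containerId = ContainerId.newContainerId(attemptId, 16L);
        NodeId nodeId = NodeId.newInstance("host-IP117", 64318);

        // Sends the stopContainers RPC seen below; the NM then kills the task JVM (SIGTERM, exit 143).
        nm.stopContainer(containerId, nodeId);
        nm.stop();
    }
}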
2015-04-28 15:10:52,547 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000016 taskAttempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:52,548 INFO [ContainerLauncher #0] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:52,548 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:52,548 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:52,548 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:52,549 DEBUG [ContainerLauncher #0] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:52,549 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:52,549 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:52,550 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:52,550 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16357. Number of active connections: 1 2015-04-28 15:10:52,551 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:52,551 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:52,552 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"3l7xwYwPCeNI48+hQAM1jIB20WHDefiEsvsE/aNb\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:52,552 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@2cb3759c 2015-04-28 15:10:52,553 INFO [ContainerLauncher #0] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:52,553 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:52,553 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:52,553 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:52,553 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:52,553 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:52,554 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"3l7xwYwPCeNI48+hQAM1jIB20WHDefiEsvsE/aNb\",nc=00000001,cnonce=\"OLH/FR9tudKPfYiIGbf4W2RGBHrds/yBbZhp0S05\",digest-uri=\"/default\",maxbuf=65536,response=7b1b6119dd23f9300cd33207a9497da2,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:52,557 DEBUG [ContainerLauncher #0] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=01d58bf94d6afdf1e0940aa9b2fae09c" 2015-04-28 15:10:52,557 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:52,557 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:52,558 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #78 2015-04-28 15:10:52,566 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #78 2015-04-28 15:10:52,566 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:52,566 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:52,566 DEBUG [ContainerLauncher #0] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 17ms 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:52,566 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000013_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:52,566 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:52,566 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000013 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:52,567 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000013_0 2015-04-28 15:10:52,567 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000013 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:52,567 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 15 2015-04-28 15:10:52,567 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:52,568 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient 
writeChunk allocating new packet seqno=28, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=101888 2015-04-28 15:10:52,568 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:52,568 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=105038 lastFlushOffset=101888 createNewBlock=false 2015-04-28 15:10:52,568 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 28 2015-04-28 15:10:52,568 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 28 2015-04-28 15:10:52,568 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 28 offsetInBlock: 101888 lastPacketInBlock: false lastByteOffsetInBlock: 105038 2015-04-28 15:10:52,571 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 28 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1510015 flag: 0 flag: 0 2015-04-28 15:10:52,571 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:52,571 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:52,571 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=29, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=104960 2015-04-28 15:10:52,571 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:52,571 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=107746 lastFlushOffset=105038 createNewBlock=false 2015-04-28 15:10:52,572 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 29 2015-04-28 15:10:52,572 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 29 2015-04-28 15:10:52,572 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 29 offsetInBlock: 104960 lastPacketInBlock: false lastByteOffsetInBlock: 107746 2015-04-28 15:10:52,574 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 29 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1491056 flag: 0 flag: 0 2015-04-28 15:10:52,574 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler 
TASK_FINISHED 2015-04-28 15:10:52,637 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:3 AssignedReds:0 CompletedMaps:15 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:52,637 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #79 2015-04-28 15:10:52,641 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #79 2015-04-28 15:10:52,641 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 4ms 2015-04-28 15:10:52,641 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:52,641 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000013 2015-04-28 15:10:52,641 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:52,641 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold not met. completedMapsForReduceSlowstart 16 2015-04-28 15:10:52,641 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:2 AssignedReds:0 CompletedMaps:15 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:52,641 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:52,641 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:52,641 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:52,641 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000012_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:52,641 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000012_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:53,068 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #176 2015-04-28 15:10:53,068 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#176 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:53,068 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,069 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:53,069 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#176 Retry#0 2015-04-28 15:10:53,069 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#176 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:53,071 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #177 2015-04-28 15:10:53,071 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#177 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:53,071 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,072 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:53,072 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#177 Retry#0 2015-04-28 15:10:53,072 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#177 Retry#0 Wrote 267 bytes. 
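The allocator entries at 15:10:52,637-641 above explain why no reducer has been requested yet: CompletedMaps is 15 and the AM logs "Reduce slow start threshold not met. completedMapsForReduceSlowstart 16". Below is a minimal sketch of that gate, assuming the usual semantics of mapreduce.job.reduce.slowstart.completedmaps (a fraction of the job's maps that must finish before reduces are scheduled); a threshold of 16 out of 16 maps suggests this job effectively runs with the fraction at 1.0. The class and method names are illustrative, not the real RMContainerAllocator code.

    // Illustrative sketch of the reduce slow-start gate seen in the allocator log.
    public class ReduceSlowStartSketch {
        // Fraction of maps that must complete before any reduce is scheduled
        // (mapreduce.job.reduce.slowstart.completedmaps).
        static int completedMapsForReduceSlowstart(double fraction, int totalMaps) {
            return (int) Math.ceil(fraction * totalMaps);
        }

        public static void main(String[] args) {
            int totalMaps = 16;        // this job runs 16 map tasks
            double fraction = 1.0;     // assumed from the 16-of-16 threshold in the log
            int threshold = completedMapsForReduceSlowstart(fraction, totalMaps); // 16
            int completedMaps = 15;    // CompletedMaps:15 at 15:10:52,641
            System.out.println(completedMaps >= threshold); // false -> "threshold not met"
        }
    }

One map later the same check passes: at 15:10:53,645 below, CompletedMaps reaches 16 and the allocator logs "Reduce slow start threshold reached. Scheduling reduces." before ramping up the single pending reduce.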
2015-04-28 15:10:53,074 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #178 2015-04-28 15:10:53,074 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#178 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:53,074 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,074 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:53,074 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#178 Retry#0 2015-04-28 15:10:53,074 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#178 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:53,298 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #5 2015-04-28 15:10:53,298 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@6b9e631c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#5 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:53,298 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,299 INFO [IPC Server handler 5 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000015_0 is : 0.0 2015-04-28 15:10:53,299 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 1 2015-04-28 15:10:53,299 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:53,299 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@6b9e631c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#5 Retry#0 2015-04-28 15:10:53,299 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_UPDATE 2015-04-28 15:10:53,299 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@6b9e631c), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#5 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:53,299 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:53,366 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #7 2015-04-28 15:10:53,366 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@2bc32f68), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#7 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:53,366 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,366 INFO [IPC Server handler 16 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_m_000015_0 is : 1.0 2015-04-28 15:10:53,368 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:53,368 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:53,368 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_UPDATE 2015-04-28 15:10:53,368 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@2bc32f68), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#7 Retry#0 2015-04-28 15:10:53,368 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: responding to statusUpdate(attempt_1430213948957_0001_m_000015_0, org.apache.hadoop.mapred.MapTaskStatus@2bc32f68), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#7 Retry#0 Wrote 41 bytes. 
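The two statusUpdate calls above are the task-to-AM heartbeat for attempt_1430213948957_0001_m_000015_0: the child JVM reports progress 0.0 and then 1.0 over the writable-RPC umbilical connection on port 21207, authenticated with the job token (auth:TOKEN as job_1430213948957_0001), and the done() acknowledgement follows just below. A toy model of that reporting interface; the names are invented and the real TaskUmbilicalProtocol signatures are not reproduced here.

    // Toy model of the child-task heartbeat visible in the TaskAttemptListenerImpl log.
    public class UmbilicalSketch {
        interface TaskReporter {
            void statusUpdate(String attemptId, float progress); // periodic progress report
            void done(String attemptId);                         // final completion ack
        }

        static class LoggingReporter implements TaskReporter {
            public void statusUpdate(String attemptId, float progress) {
                System.out.println("Progress of TaskAttempt " + attemptId + " is : " + progress);
            }
            public void done(String attemptId) {
                System.out.println("Done acknowledgement from " + attemptId);
            }
        }

        public static void main(String[] args) {
            TaskReporter r = new LoggingReporter();
            r.statusUpdate("attempt_1430213948957_0001_m_000015_0", 0.0f);
            r.statusUpdate("attempt_1430213948957_0001_m_000015_0", 1.0f);
            r.done("attempt_1430213948957_0001_m_000015_0");
        }
    }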
2015-04-28 15:10:53,368 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:53,369 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #8 2015-04-28 15:10:53,369 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: done(attempt_1430213948957_0001_m_000015_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#8 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:53,370 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:53,370 INFO [IPC Server handler 10 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:53,370 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 1 procesingTime= 0 2015-04-28 15:10:53,370 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:53,371 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_DONE 2015-04-28 15:10:53,371 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to done(attempt_1430213948957_0001_m_000015_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#8 Retry#0 2015-04-28 15:10:53,371 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000015_0 TaskAttempt Transitioned from RUNNING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:53,371 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: responding to done(attempt_1430213948957_0001_m_000015_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16360 Call#8 Retry#0 Wrote 118 bytes. 
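The done() call above is what moves attempt_1430213948957_0001_m_000015_0 out of RUNNING: the dispatcher delivers TA_DONE and the attempt transitions to SUCCESS_CONTAINER_CLEANUP, the AM then stops the container on the NodeManager (the KILLING / stopContainers sequence just below), and the resulting TA_CONTAINER_CLEANED event at 15:10:53,387-388 completes the transition to SUCCEEDED. A toy slice of that state machine with invented enum names; the real transition table lives in TaskAttemptImpl and is much larger.

    import java.util.EnumMap;
    import java.util.Map;

    // Toy slice of the task-attempt lifecycle visible in this log.
    public class AttemptLifecycleSketch {
        enum State { RUNNING, SUCCESS_CONTAINER_CLEANUP, SUCCEEDED }
        enum Event { TA_DONE, TA_CONTAINER_CLEANED }

        static final Map<State, Map<Event, State>> TRANSITIONS = new EnumMap<>(State.class);
        static {
            TRANSITIONS.put(State.RUNNING, new EnumMap<>(Event.class));
            TRANSITIONS.get(State.RUNNING).put(Event.TA_DONE, State.SUCCESS_CONTAINER_CLEANUP);
            TRANSITIONS.put(State.SUCCESS_CONTAINER_CLEANUP, new EnumMap<>(Event.class));
            TRANSITIONS.get(State.SUCCESS_CONTAINER_CLEANUP)
                       .put(Event.TA_CONTAINER_CLEANED, State.SUCCEEDED);
        }

        public static void main(String[] args) {
            State s = State.RUNNING;
            s = TRANSITIONS.get(s).get(Event.TA_DONE);              // SUCCESS_CONTAINER_CLEANUP
            s = TRANSITIONS.get(s).get(Event.TA_CONTAINER_CLEANED); // SUCCEEDED
            System.out.println(s);
        }
    }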
2015-04-28 15:10:53,371 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000017 taskAttempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:53,371 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000017 taskAttempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:53,372 INFO [ContainerLauncher #1] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:53,372 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:53,372 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:53,372 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:53,372 DEBUG [ContainerLauncher #1] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:53,373 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:53,373 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16360. Number of active connections: 0 2015-04-28 15:10:53,374 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:53,374 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:53,375 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:53,375 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:53,377 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"Xys4dY2XmgAzfhDI4fco1egQ2VbdClFffldNEr54\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:53,377 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@62953ecf 2015-04-28 15:10:53,377 INFO [ContainerLauncher #1] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:53,377 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:53,378 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:53,378 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:53,378 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:53,378 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:53,378 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"Xys4dY2XmgAzfhDI4fco1egQ2VbdClFffldNEr54\",nc=00000001,cnonce=\"ha1MJ0SYGx8Hk4RKfXhV5zuLFQClxvUKg2be5ILs\",digest-uri=\"/default\",maxbuf=65536,response=540d450cdab696e4647ccf5f7ec47506,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:53,381 DEBUG [ContainerLauncher #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=bf5fe72acc789b9afe9d924f4c5c4370" 2015-04-28 15:10:53,381 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:53,382 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:53,382 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #80 2015-04-28 15:10:53,387 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #80 2015-04-28 15:10:53,387 DEBUG [ContainerLauncher #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 13ms 2015-04-28 15:10:53,387 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:53,387 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:53,387 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:53,387 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:53,388 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_m_000015_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:53,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:53,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:53,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: MAP_ATTEMPT_FINISHED 2015-04-28 15:10:53,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:53,388 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:53,388 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_m_000015 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:53,389 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_m_000015_0 2015-04-28 15:10:53,389 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_m_000015 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:53,389 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=30, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=107520 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:53,389 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 
history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:53,389 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 16 2015-04-28 15:10:53,389 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=110881 lastFlushOffset=107746 createNewBlock=false 2015-04-28 15:10:53,389 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 30 2015-04-28 15:10:53,389 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:53,389 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 30 2015-04-28 15:10:53,390 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 30 offsetInBlock: 107520 lastPacketInBlock: false lastByteOffsetInBlock: 110881 2015-04-28 15:10:53,392 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 30 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1438289 flag: 0 flag: 0 2015-04-28 15:10:53,392 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler MAP_ATTEMPT_FINISHED 2015-04-28 15:10:53,392 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:53,393 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=31, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=110592 2015-04-28 15:10:53,393 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:53,393 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=113589 lastFlushOffset=110881 createNewBlock=false 2015-04-28 15:10:53,393 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 31 2015-04-28 15:10:53,393 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 31 2015-04-28 15:10:53,393 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 31 offsetInBlock: 110592 lastPacketInBlock: false lastByteOffsetInBlock: 113589 2015-04-28 15:10:53,397 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 31 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 2744360 flag: 0 flag: 0 2015-04-28 15:10:53,397 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 
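Each history-event flush above follows the same DFSClient pattern on the .jhist file: writeChunk allocates a new packet, flush() queues it (packets 30 and 31 here, 28 and 29 earlier), and the event handling thread blocks until the datanode pipeline acks it. The reported packetSize=65016 with chunksPerPacket=126 is consistent with the default write-path layout, assuming 512-byte checksum chunks each followed by a 4-byte CRC: 126 * (512 + 4) = 65,016 bytes of data plus checksums per packet. A quick check of that arithmetic:

    // Checks the packet-size arithmetic implied by the DFSClient log entries,
    // assuming the default 512-byte bytes-per-checksum and 4-byte CRC checksums.
    public class PacketSizeCheck {
        public static void main(String[] args) {
            int bytesPerChecksum = 512;  // dfs.bytes-per-checksum (assumed default)
            int checksumSize = 4;        // CRC32/CRC32C width
            int chunksPerPacket = 126;   // from the log
            System.out.println(chunksPerPacket * (bytesPerChecksum + checksumSize)); // 65016
        }
    }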
2015-04-28 15:10:53,641 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Before Scheduling: PendingReds:1 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:2 AssignedReds:0 CompletedMaps:16 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:53,642 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #81 2015-04-28 15:10:53,645 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #81 2015-04-28 15:10:53,645 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:53,645 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:53,645 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000016 2015-04-28 15:10:53,645 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Recalculating schedule, headroom= 2015-04-28 15:10:53,645 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Reduce slow start threshold reached. Scheduling reduces. 2015-04-28 15:10:53,645 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:53,645 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: All maps assigned. Ramping up all remaining reduces:1 2015-04-28 15:10:53,645 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:53,645 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: Added priority=10 2015-04-28 15:10:53,645 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:53,645 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000013_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:53,645 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: addResourceRequest: applicationId=1 priority=10 resourceName=* numContainers=1 #asks=1 2015-04-28 15:10:53,646 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:0 ScheduledMaps:0 ScheduledReds:1 AssignedMaps:1 AssignedReds:0 CompletedMaps:16 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:53,646 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Diagnostics report from attempt_1430213948957_0001_m_000013_0: Container killed by the ApplicationMaster. Container killed on request. 
Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:54,077 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #179 2015-04-28 15:10:54,077 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#179 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:54,095 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:54,096 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 18 procesingTime= 1 2015-04-28 15:10:54,096 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#179 Retry#0 2015-04-28 15:10:54,096 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#179 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:54,097 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #180 2015-04-28 15:10:54,098 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#180 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:54,098 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:54,098 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:54,098 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#180 Retry#0 2015-04-28 15:10:54,098 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#180 Retry#0 Wrote 267 bytes. 
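In the allocator pass at 15:10:53,645 above, the AM ramps up its one remaining reduce: addResourceRequest records an ask at priority=10 for resourceName=* with numContainers=1. When the ResourceManager hands back container_1430213948957_0001_01_000021 (15:10:55,658 below), the matching decResourceRequest drops the outstanding count back to 0. A small sketch of that ask bookkeeping keyed by (priority, resourceName); the priority-10 reduce ask and the "*" any-host resource name are taken from the log, the rest is illustrative rather than the real RMContainerRequestor.

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative ask table mirroring addResourceRequest / decResourceRequest.
    public class AskTableSketch {
        static final Map<String, Integer> asks = new HashMap<>();

        static String key(int priority, String resourceName) {
            return priority + "/" + resourceName;
        }

        static void addResourceRequest(int priority, String resourceName) {
            asks.merge(key(priority, resourceName), 1, Integer::sum);
        }

        static void decResourceRequest(int priority, String resourceName) {
            asks.merge(key(priority, resourceName), -1, Integer::sum);
        }

        public static void main(String[] args) {
            addResourceRequest(10, "*");  // ask for one reduce container, any host
            System.out.println(asks);     // {10/*=1}
            decResourceRequest(10, "*");  // container_..._000021 assigned to the reduce
            System.out.println(asks);     // {10/*=0}
        }
    }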
2015-04-28 15:10:54,100 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #181 2015-04-28 15:10:54,100 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#181 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:54,100 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:54,100 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:54,100 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#181 Retry#0 2015-04-28 15:10:54,100 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#181 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:54,646 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #82 2015-04-28 15:10:54,650 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #82 2015-04-28 15:10:54,650 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 4ms 2015-04-28 15:10:54,651 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=1 release= 0 newContainers=0 finishedContainers=1 resourcelimit= knownNMs=2 2015-04-28 15:10:54,651 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:54,651 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received completed container container_1430213948957_0001_01_000017 2015-04-28 15:10:54,651 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:0 ScheduledMaps:0 ScheduledReds:1 AssignedMaps:0 AssignedReds:0 CompletedMaps:16 CompletedReds:0 ContAlloc:17 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:54,651 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_COMPLETED 2015-04-28 15:10:54,651 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_CONTAINER_COMPLETED 2015-04-28 15:10:54,651 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent.EventType: TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:54,651 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_m_000015_0 of type TA_DIAGNOSTICS_UPDATE 2015-04-28 15:10:54,651 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: 
Diagnostics report from attempt_1430213948957_0001_m_000015_0: Container killed by the ApplicationMaster. Container killed on request. Exit code is 143 Container exited with a non-zero exit code 143 2015-04-28 15:10:55,103 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #182 2015-04-28 15:10:55,103 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#182 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:55,103 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:55,104 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:55,104 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#182 Retry#0 2015-04-28 15:10:55,104 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#182 Retry#0 Wrote 33 bytes. 2015-04-28 15:10:55,105 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #183 2015-04-28 15:10:55,106 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#183 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:55,106 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:55,106 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:55,106 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#183 Retry#0 2015-04-28 15:10:55,106 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#183 Retry#0 Wrote 267 bytes. 
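The "Exit code is 143" diagnostics above (for attempts m_000012_0, m_000013_0 and m_000015_0) look alarming but are the normal epilogue of a successful map here: after each done() the AM itself stops the container (the KILLING / stopContainers calls earlier in this log), and 143 is the conventional 128 + signal-number encoding for a process ended by SIGTERM (15). It indicates an AM-requested kill, not a task failure. The decoding, for reference:

    // Decodes a 128+N "terminated by signal" container exit status.
    public class ExitCodeDecode {
        public static void main(String[] args) {
            int exitCode = 143;                         // from the diagnostics above
            int signal = exitCode > 128 ? exitCode - 128 : 0;
            System.out.println(signal);                 // 15 == SIGTERM
        }
    }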
2015-04-28 15:10:55,108 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #184 2015-04-28 15:10:55,108 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#184 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:55,108 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:55,109 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:55,109 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#184 Retry#0 2015-04-28 15:10:55,109 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#184 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:55,651 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #83 2015-04-28 15:10:55,658 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #83 2015-04-28 15:10:55,658 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 7ms 2015-04-28 15:10:55,658 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: headroom= 2015-04-28 15:10:55,658 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Received new Container :Container: [ContainerId: container_1430213948957_0001_01_000021, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 10, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] 2015-04-28 15:10:55,658 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Got allocated containers 1 2015-04-28 15:10:55,658 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container container_1430213948957_0001_01_000021 with priority 10 to NM host-IP117:64318 2015-04-28 15:10:55,659 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigning container Container: [ContainerId: container_1430213948957_0001_01_000021, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 10, Token: Token { kind: ContainerToken, service: IP117:64318 }, ] to reduce 2015-04-28 15:10:55,659 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned to reduce 2015-04-28 15:10:55,659 DEBUG [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: BEFORE decResourceRequest: applicationId=1 priority=10 resourceName=* numContainers=1 #asks=0 2015-04-28 15:10:55,659 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: AFTER decResourceRequest: applicationId=1 priority=10 resourceName=* numContainers=0 #asks=1 2015-04-28 15:10:55,659 DEBUG [AsyncDispatcher event handler] 
org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent.EventType: TA_ASSIGNED 2015-04-28 15:10:55,659 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container container_1430213948957_0001_01_000021 to attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:55,659 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_ASSIGNED 2015-04-28 15:10:55,659 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Assigned container (Container: [ContainerId: container_1430213948957_0001_01_000021, NodeId: host-IP117:64318, NodeHttpAddress: host-IP117:64320, Resource: , Priority: 10, Token: Token { kind: ContainerToken, service: IP117:64318 }, ]) to task attempt_1430213948957_0001_r_000000_0 on node host-IP117:64318 2015-04-28 15:10:55,659 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: After Scheduling: PendingReds:0 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:0 AssignedReds:1 CompletedMaps:16 CompletedReds:0 ContAlloc:18 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:10:55,668 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapred.SortedRanges: currentIndex 0 0:0 2015-04-28 15:10:55,672 INFO [AsyncDispatcher event handler] org.apache.hadoop.yarn.util.RackResolver: Resolved host-IP117 to /default-rack 2015-04-28 15:10:55,673 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from UNASSIGNED to ASSIGNED 2015-04-28 15:10:55,673 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent.EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000021 taskAttempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:55,673 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: TASK_CONTAINER_NEED_UPDATE 2015-04-28 15:10:55,673 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_LAUNCH for container container_1430213948957_0001_01_000021 taskAttempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:55,673 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Launching attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:55,673 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:55,674 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:55,674 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:55,674 DEBUG 
[ContainerLauncher #2] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:55,674 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:55,677 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:55,677 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:55,678 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:55,678 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"rtvFwL+DMIE9pM3lrd5U5hWqvjxLcOi1ZqPzNd2d\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@7750f68e 2015-04-28 15:10:55,680 INFO [ContainerLauncher #2] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:55,680 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"rtvFwL+DMIE9pM3lrd5U5hWqvjxLcOi1ZqPzNd2d\",nc=00000001,cnonce=\"5DEhoAqW34QKhJQY/EsauHvHtNQHtz3I7Nc4AYe6\",digest-uri=\"/default\",maxbuf=65536,response=136d68fce9725127b55ad3907a69f566,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:55,683 DEBUG [ContainerLauncher #2] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=87e31d9e8e3dd41075114036ef6c3242" 2015-04-28 15:10:55,683 DEBUG [ContainerLauncher #2] 
org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:55,684 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:55,685 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #84 2015-04-28 15:10:55,691 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #84 2015-04-28 15:10:55,691 DEBUG [ContainerLauncher #2] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: startContainers took 15ms 2015-04-28 15:10:55,691 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:55,691 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:55,692 INFO [ContainerLauncher #2] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Shuffle port returned by ContainerManager for attempt_1430213948957_0001_r_000000_0 : 13562 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent.EventType: TA_CONTAINER_LAUNCHED 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_CONTAINER_LAUNCHED 2015-04-28 15:10:55,692 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: TaskAttempt: [attempt_1430213948957_0001_r_000000_0] using containerId: [container_1430213948957_0001_01_000021 on NM: [host-IP117:64318] 2015-04-28 15:10:55,692 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from ASSIGNED to RUNNING 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: REDUCE_ATTEMPT_STARTED 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_START 2015-04-28 15:10:55,692 DEBUG 
[eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_LAUNCHED 2015-04-28 15:10:55,692 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_r_000000 of type T_ATTEMPT_LAUNCHED 2015-04-28 15:10:55,692 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_r_000000 Task Transitioned from SCHEDULED to RUNNING 2015-04-28 15:10:55,692 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=32, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=113152 2015-04-28 15:10:55,693 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler REDUCE_ATTEMPT_STARTED 2015-04-28 15:10:56,111 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #185 2015-04-28 15:10:56,111 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#185 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:56,112 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:56,112 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:56,112 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#185 Retry#0 2015-04-28 15:10:56,112 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#185 Retry#0 Wrote 33 bytes. 
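The eventHandlingThread entries above show the JobHistoryEventHandler appending serialized job-history events to the job's .jhist file on HDFS (the DFSClient writeChunk entries for job_1430213948957_0001_1.jhist); later entries in this log flush that stream, which is what produces the "Queued packet N / Waiting for ack" round trips. A minimal sketch of that write-then-flush pattern using the public FileSystem API, assuming a reachable cluster; the class name, path, and plain-string "events" are placeholders (the real handler writes Avro-encoded history events), not the actual implementation:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class JhistFlushSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder cluster URI and file path, modeled on the log's
        // hdfs://hacluster:8020/staging-dir/.../job_..._1.jhist history file.
        FileSystem fs = FileSystem.get(URI.create("hdfs://hacluster:8020"), conf);
        Path jhist = new Path("/tmp/example-job_0001_1.jhist");

        try (FSDataOutputStream out = fs.create(jhist, true)) {
            String[] events = {"REDUCE_ATTEMPT_STARTED", "REDUCE_ATTEMPT_FINISHED", "TASK_FINISHED"};
            for (String event : events) {
                out.writeBytes(event + "\n"); // the real handler writes Avro-encoded events here
                out.hflush();                 // forces a packet to the datanode pipeline, i.e. one
                                              // "Queued packet / Waiting for ack" pair in the log
            }
        }
        fs.close();
    }
}

Flushing after every event is the design choice that lets the JobHistoryServer reconstruct the job even if the AM dies mid-run; the cost is one pipeline round trip per event, visible as the per-packet ack latencies later in this log.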
2015-04-28 15:10:56,115 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #186 2015-04-28 15:10:56,115 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#186 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:56,116 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:56,116 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:56,116 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#186 Retry#0 2015-04-28 15:10:56,117 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#186 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:56,118 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #187 2015-04-28 15:10:56,118 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#187 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:56,119 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:56,119 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:56,119 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#187 Retry#0 2015-04-28 15:10:56,119 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#187 Retry#0 Wrote 267 bytes. 
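The port 48332 entries here are the AM's client-facing MRClientProtocol service answering getJobReport and getTaskAttemptCompletionEvents roughly once per second from IP117:43359, i.e. the submitting user (dsperf) polling job progress. A minimal sketch of what that polling loop looks like from the client side, assuming a live cluster and using the public org.apache.hadoop.mapreduce.Cluster/Job API rather than the raw protobuf protocol; the class name and sleep interval are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;

public class JobPollSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Cluster cluster = new Cluster(conf);
        // Job id taken from this log; getJob returns null if the job is no longer known.
        Job job = cluster.getJob(JobID.forName("job_1430213948957_0001"));

        int fromEventId = 0;
        while (!job.isComplete()) {
            // Backed by MRClientProtocolPB.getTaskAttemptCompletionEvents on the AM.
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEventId, 10);
            fromEventId += events.length;
            for (TaskCompletionEvent e : events) {
                System.out.println(e.getTaskAttemptId() + " -> " + e.getStatus());
            }
            // Backed by MRClientProtocolPB.getJobReport on the AM.
            System.out.printf("map %.0f%% reduce %.0f%%%n",
                    job.mapProgress() * 100, job.reduceProgress() * 100);
            Thread.sleep(1000);
        }
        cluster.close();
    }
}

Each progress query corresponds to one getJobReport entry (the repeated "Wrote 267 bytes" responses), and each completion-event fetch to one getTaskAttemptCompletionEvents entry.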
2015-04-28 15:10:56,660 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #85 2015-04-28 15:10:56,664 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #85 2015-04-28 15:10:56,664 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 4ms 2015-04-28 15:10:56,665 INFO [RMCommunicator Allocator] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: getResources() for application_1430213948957_0001: ask=1 release= 0 newContainers=0 finishedContainers=0 resourcelimit= knownNMs=2 2015-04-28 15:10:56,986 DEBUG [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Server connection from IP117:16365; # active connections: 1; # queued calls: 0 2015-04-28 15:10:57,048 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:57,048 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: Created SASL server with mechanism = DIGEST-MD5 2015-04-28 15:10:57,048 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"Du8unfHLV0tNumApuldxHZacpHMXSQPnNNCrrG1W\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:57,048 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16365 Call#-33 Retry#-1 2015-04-28 15:10:57,048 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16365 Call#-33 Retry#-1 Wrote 178 bytes. 2015-04-28 15:10:57,122 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #188 2015-04-28 15:10:57,122 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#188 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:57,122 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:57,122 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:57,123 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#188 Retry#0 2015-04-28 15:10:57,123 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#188 Retry#0 Wrote 33 bytes. 
2015-04-28 15:10:57,124 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #189 2015-04-28 15:10:57,124 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#189 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:57,124 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:57,125 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:57,125 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#189 Retry#0 2015-04-28 15:10:57,125 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#189 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:57,126 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-33 2015-04-28 15:10:57,126 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #190 2015-04-28 15:10:57,126 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Have read input token of size 270 for processing by saslServer.evaluateResponse() 2015-04-28 15:10:57,126 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#190 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:57,126 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting password for client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:57,127 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:57,127 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#190 Retry#0 2015-04-28 15:10:57,127 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#190 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.security.SaslRpcServer: SASL server DIGEST-MD5 callback: setting canonicalized client ID: job_1430213948957_0001 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Will send SUCCESS token of size 40 from saslServer. 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server context established. 
Negotiated QoP is auth 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: SASL server successfully authenticated client: job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:57,127 INFO [Socket Reader #1 for port 21207] SecurityLogger.org.apache.hadoop.ipc.Server: Auth successful for job_1430213948957_0001 (auth:SIMPLE) 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Sending sasl message state: SUCCESS token: "rspauth=7f6495432b8ca0de90d52ebc0e239296" 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16365 Call#-33 Retry#-1 2015-04-28 15:10:57,127 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: responding to null from IP117:16365 Call#-33 Retry#-1 Wrote 64 bytes. 2015-04-28 15:10:57,137 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #-3 2015-04-28 15:10:57,137 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Successfully authorized userInfo { } protocol: "org.apache.hadoop.mapred.TaskUmbilicalProtocol" 2015-04-28 15:10:57,137 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #0 2015-04-28 15:10:57,137 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: getTask(org.apache.hadoop.mapred.JvmContext@5448d7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#0 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:57,138 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:57,138 INFO [IPC Server handler 13 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID : jvm_1430213948957_0001_r_000021 asked for a task 2015-04-28 15:10:57,139 INFO [IPC Server handler 13 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: JVM with ID: jvm_1430213948957_0001_r_000021 given task: attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:57,139 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: Served: getTask queueTime= 1 procesingTime= 1 2015-04-28 15:10:57,141 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@5448d7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#0 Retry#0 2015-04-28 15:10:57,141 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to getTask(org.apache.hadoop.mapred.JvmContext@5448d7ea), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#0 Retry#0 Wrote 448 bytes. 
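The ContainerLauncher #2 entries earlier (client side) and the Socket Reader entries just above (server side) trace a complete DIGEST-MD5 SASL handshake secured by a token: NEGOTIATE with a realm/nonce challenge, an INITIATE carrying the digest response, and a SUCCESS with rspauth, ending with QOP "auth". A self-contained loopback sketch of the same handshake using only the JDK's javax.security.sasl API; the USER/SECRET strings merely stand in for the token identifier and secret, and the protocol/server names ("yarn", "default") are arbitrary for the demo:

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.sasl.AuthorizeCallback;
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslServer;

public class DigestMd5Sketch {
    static final String USER = "tokenIdentifierBase64";           // stands in for the NMToken identifier
    static final char[] SECRET = "tokenPassword".toCharArray();   // stands in for the token secret

    public static void main(String[] args) throws Exception {
        CallbackHandler clientCbh = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof NameCallback) {                              // "setting username"
                    ((NameCallback) cb).setName(USER);
                } else if (cb instanceof PasswordCallback) {                   // "setting userPassword"
                    ((PasswordCallback) cb).setPassword(SECRET);
                } else if (cb instanceof RealmCallback) {                      // "setting realm: default"
                    RealmCallback rc = (RealmCallback) cb;
                    rc.setText(rc.getDefaultText());
                }
            }
        };
        CallbackHandler serverCbh = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof PasswordCallback) {                          // "setting password for client"
                    ((PasswordCallback) cb).setPassword(SECRET);
                } else if (cb instanceof RealmCallback) {
                    RealmCallback rc = (RealmCallback) cb;
                    rc.setText(rc.getDefaultText());
                } else if (cb instanceof AuthorizeCallback) {                  // "successfully authenticated client"
                    ((AuthorizeCallback) cb).setAuthorized(true);
                }
                // NameCallback needs no action: its default is the client-supplied username.
            }
        };

        SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "yarn", "default", null, serverCbh);
        SaslClient client = Sasl.createSaslClient(new String[] {"DIGEST-MD5"}, null, "yarn", "default", null, clientCbh);

        byte[] challenge = server.evaluateResponse(new byte[0]);  // NEGOTIATE: realm, nonce, qop=auth
        byte[] response  = client.evaluateChallenge(challenge);   // INITIATE: username, cnonce, digest response
        byte[] rspauth   = server.evaluateResponse(response);     // SUCCESS: rspauth=...
        client.evaluateChallenge(rspauth);                        // client verifies rspauth

        System.out.println("client complete=" + client.isComplete() + " server complete=" + server.isComplete());
        System.out.println("negotiated qop=" + client.getNegotiatedProperty(Sasl.QOP)); // "auth", as in the log
        client.dispose();
        server.dispose();
    }
}

The callback trio (name, password, realm) is exactly what the SaslRpcClient lines report setting; with qop limited to "auth" the session is authenticated but not encrypted, matching the negotiated QOP above.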
2015-04-28 15:10:57,665 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #86 2015-04-28 15:10:57,668 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #86 2015-04-28 15:10:57,668 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:57,877 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #1 2015-04-28 15:10:57,877 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: getMapCompletionEvents(job_1430213948957_0001, 0, 10000, attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#1 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:57,877 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:57,878 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: MapCompletionEvents request from attempt_1430213948957_0001_r_000000_0. startIndex 0 maxEvents 10000 2015-04-28 15:10:57,879 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: Served: getMapCompletionEvents queueTime= 1 procesingTime= 1 2015-04-28 15:10:57,879 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getMapCompletionEvents(job_1430213948957_0001, 0, 10000, attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#1 Retry#0 2015-04-28 15:10:57,879 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to getMapCompletionEvents(job_1430213948957_0001, 0, 10000, attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#1 Retry#0 Wrote 1396 bytes. 
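The recurring "RMCommunicator Allocator ... Call: allocate took Nms" entries are the AM's once-per-second allocate heartbeat to the ResourceManager; even with nothing to ask for or release it keeps the attempt alive and returns newly allocated or finished containers. The AM in this log uses its internal RMContainerRequestor, but a rough equivalent can be sketched with the public AMRMClient library (class name, placeholder register arguments, and the fixed progress value are assumptions; this only runs inside an AM container that already holds an AMRM token):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class AllocateHeartbeatSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        AMRMClient<AMRMClient.ContainerRequest> rm = AMRMClient.createAMRMClient();
        rm.init(conf);
        rm.start();
        rm.registerApplicationMaster("", 0, "");   // host / port / tracking-url placeholders

        boolean finished = false;
        while (!finished) {
            // One call here corresponds to one "Call: allocate took Nms" entry in the log;
            // it doubles as the AM->RM heartbeat even when ask/release are empty.
            AllocateResponse resp = rm.allocate(0.5f /* reported progress */);
            for (Container c : resp.getAllocatedContainers()) {
                System.out.println("newContainer " + c.getId());        // "newContainers=..."
            }
            for (ContainerStatus s : resp.getCompletedContainersStatuses()) {
                System.out.println("finishedContainer " + s.getContainerId()); // "finishedContainers=..."
            }
            Thread.sleep(1000);
            finished = true; // single iteration for the sketch
        }
        rm.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", "");
        rm.stop();
    }
}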
2015-04-28 15:10:58,129 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #2 2015-04-28 15:10:58,130 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5a8f3313), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#2 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,130 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #191 2015-04-28 15:10:58,130 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,130 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#191 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:58,130 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,130 INFO [IPC Server handler 13 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_r_000000_0 is : 0.0 2015-04-28 15:10:58,130 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 0 procesingTime= 0 2015-04-28 15:10:58,131 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#191 Retry#0 2015-04-28 15:10:58,131 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#191 Retry#0 Wrote 33 bytes. 2015-04-28 15:10:58,132 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 0 procesingTime= 2 2015-04-28 15:10:58,132 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:58,132 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5a8f3313), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#2 Retry#0 2015-04-28 15:10:58,132 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_UPDATE 2015-04-28 15:10:58,132 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5a8f3313), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#2 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:58,132 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:58,132 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #192 2015-04-28 15:10:58,132 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#192 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:58,132 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,133 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 1 procesingTime= 0 2015-04-28 15:10:58,133 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#192 Retry#0 2015-04-28 15:10:58,133 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#192 Retry#0 Wrote 267 bytes. 2015-04-28 15:10:58,139 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #193 2015-04-28 15:10:58,139 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#193 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:58,139 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,140 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:58,140 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#193 Retry#0 2015-04-28 15:10:58,140 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#193 Retry#0 Wrote 267 bytes. 
2015-04-28 15:10:58,157 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #3 2015-04-28 15:10:58,157 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@733ac7a0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#3 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,157 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,158 INFO [IPC Server handler 6 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_r_000000_0 is : 0.0 2015-04-28 15:10:58,158 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 0 2015-04-28 15:10:58,158 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:58,159 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_UPDATE 2015-04-28 15:10:58,159 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@733ac7a0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#3 Retry#0 2015-04-28 15:10:58,159 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:58,159 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@733ac7a0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#3 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:58,497 DEBUG [IPC Server idle connection scanner for port 48332] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 48332: task running 2015-04-28 15:10:58,669 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #87 2015-04-28 15:10:58,672 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #87 2015-04-28 15:10:58,672 DEBUG [RMCommunicator Allocator] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: allocate took 3ms 2015-04-28 15:10:58,745 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #12 2015-04-28 15:10:58,745 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: commitPending(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@6cdab127), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#12 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,746 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,746 INFO [IPC Server handler 20 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Commit-pending state update from attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,746 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: Served: commitPending queueTime= 1 procesingTime= 0 2015-04-28 15:10:58,746 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_COMMIT_PENDING 2015-04-28 15:10:58,746 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_COMMIT_PENDING 2015-04-28 15:10:58,746 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to commitPending(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@6cdab127), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#12 Retry#0 2015-04-28 15:10:58,746 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from RUNNING to COMMIT_PENDING 2015-04-28 15:10:58,746 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_COMMIT_PENDING 2015-04-28 15:10:58,746 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: responding to commitPending(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@6cdab127), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#12 Retry#0 Wrote 118 bytes. 
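The "TaskAttempt Transitioned from ... to ..." INFO lines are emitted by the TaskAttemptImpl state machine as events arrive on the AsyncDispatcher; in this log the reduce attempt moves ASSIGNED -> RUNNING -> COMMIT_PENDING and, further down, on to SUCCESS_CONTAINER_CLEANUP and SUCCEEDED. A deliberately tiny, hypothetical mirror of just those transitions (the real state machine has many more states, events, and failure paths), useful only to make the sequencing explicit:

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;

public class TaskAttemptStatesSketch {
    // Only the states and transitions visible in this log; purely illustrative.
    enum State { ASSIGNED, RUNNING, COMMIT_PENDING, SUCCESS_CONTAINER_CLEANUP, SUCCEEDED }

    static final Map<State, EnumSet<State>> ALLOWED = new EnumMap<>(State.class);
    static {
        ALLOWED.put(State.ASSIGNED, EnumSet.of(State.RUNNING));                          // TA_CONTAINER_LAUNCHED
        ALLOWED.put(State.RUNNING, EnumSet.of(State.COMMIT_PENDING));                    // TA_COMMIT_PENDING
        ALLOWED.put(State.COMMIT_PENDING, EnumSet.of(State.SUCCESS_CONTAINER_CLEANUP));  // TA_DONE
        ALLOWED.put(State.SUCCESS_CONTAINER_CLEANUP, EnumSet.of(State.SUCCEEDED));       // TA_CONTAINER_CLEANED
        ALLOWED.put(State.SUCCEEDED, EnumSet.noneOf(State.class));
    }

    static State transition(State from, State to) {
        if (!ALLOWED.get(from).contains(to)) {
            throw new IllegalStateException(from + " -> " + to + " not allowed");
        }
        System.out.println("TaskAttempt Transitioned from " + from + " to " + to);
        return to;
    }

    public static void main(String[] args) {
        State s = State.ASSIGNED;
        s = transition(s, State.RUNNING);
        s = transition(s, State.COMMIT_PENDING);
        s = transition(s, State.SUCCESS_CONTAINER_CLEANUP);
        s = transition(s, State.SUCCEEDED);
    }
}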
2015-04-28 15:10:58,746 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_r_000000 of type T_ATTEMPT_COMMIT_PENDING 2015-04-28 15:10:58,747 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: attempt_1430213948957_0001_r_000000_0 given a go for committing the task output. 2015-04-28 15:10:58,748 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #13 2015-04-28 15:10:58,748 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: canCommit(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#13 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,748 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,748 INFO [IPC Server handler 22 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Commit go/no-go request from attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,748 INFO [IPC Server handler 22 on 21207] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Result of canCommit for attempt_1430213948957_0001_r_000000_0:true 2015-04-28 15:10:58,748 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: Served: canCommit queueTime= 0 procesingTime= 0 2015-04-28 15:10:58,749 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: responding to canCommit(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#13 Retry#0 2015-04-28 15:10:58,749 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: responding to canCommit(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#13 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:58,819 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #17 2015-04-28 15:10:58,819 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5eb0bc9a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#17 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,819 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,820 INFO [IPC Server handler 13 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Progress of TaskAttempt attempt_1430213948957_0001_r_000000_0 is : 1.0 2015-04-28 15:10:58,823 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: Served: statusUpdate queueTime= 1 procesingTime= 3 2015-04-28 15:10:58,823 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.EventType: TA_UPDATE 2015-04-28 15:10:58,823 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_UPDATE 2015-04-28 15:10:58,823 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5eb0bc9a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#17 Retry#0 2015-04-28 15:10:58,823 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: responding to statusUpdate(attempt_1430213948957_0001_r_000000_0, org.apache.hadoop.mapred.ReduceTaskStatus@5eb0bc9a), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#17 Retry#0 Wrote 41 bytes. 
2015-04-28 15:10:58,823 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:58,825 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: got #18 2015-04-28 15:10:58,825 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: done(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#18 Retry#0 for RpcKind RPC_WRITABLE 2015-04-28 15:10:58,825 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:job_1430213948957_0001 (auth:TOKEN) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:58,826 INFO [IPC Server handler 6 on 21207] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Done acknowledgement from attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,826 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: Served: done queueTime= 0 procesingTime= 1 2015-04-28 15:10:58,826 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_DONE 2015-04-28 15:10:58,826 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_DONE 2015-04-28 15:10:58,826 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: responding to done(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#18 Retry#0 2015-04-28 15:10:58,826 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from COMMIT_PENDING to SUCCESS_CONTAINER_CLEANUP 2015-04-28 15:10:58,826 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: responding to done(attempt_1430213948957_0001_r_000000_0), rpc version=2, client version=19, methodsFingerPrint=937413979 from IP117:16365 Call#18 Retry#0 Wrote 118 bytes. 
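The port 21207 entries trace the reduce task JVM talking back to the AM over the task umbilical: getTask to be handed an attempt, getMapCompletionEvents to locate shuffle inputs, periodic statusUpdate calls, then commitPending, a canCommit go/no-go check, and finally done. A hypothetical, simplified mirror of that call sequence (the real org.apache.hadoop.mapred.TaskUmbilicalProtocol uses Hadoop's own record types and RPC plumbing; the interface, class names, and ids below are placeholders):

// Hypothetical stand-in for the umbilical calls seen in this log.
interface Umbilical {
    String getTask(String jvmId);                         // "JVM with ID ... asked for a task"
    void statusUpdate(String attemptId, float progress);  // "Progress of TaskAttempt ... is : x"
    void commitPending(String attemptId);                 // "Commit-pending state update"
    boolean canCommit(String attemptId);                  // "Commit go/no-go request"
    void done(String attemptId);                          // "Done acknowledgement"
}

public class ReduceChildSketch {
    public static void main(String[] args) {
        Umbilical am = new LoggingUmbilical();
        String attempt = am.getTask("jvm_0001_r_000021");  // placeholder jvm id
        am.statusUpdate(attempt, 0.0f);   // shuffle/sort/reduce phases report progress as they run
        am.statusUpdate(attempt, 1.0f);
        am.commitPending(attempt);        // output staged in the attempt's _temporary directory
        if (am.canCommit(attempt)) {      // AM arbitrates between (possibly speculative) attempts
            am.done(attempt);             // AM then drives the attempt toward SUCCEEDED
        }
    }

    static class LoggingUmbilical implements Umbilical {
        public String getTask(String jvmId) { System.out.println("getTask " + jvmId); return "attempt_0001_r_000000_0"; }
        public void statusUpdate(String a, float p) { System.out.println("statusUpdate " + a + " " + p); }
        public void commitPending(String a) { System.out.println("commitPending " + a); }
        public boolean canCommit(String a) { System.out.println("canCommit " + a); return true; }
        public void done(String a) { System.out.println("done " + a); }
    }
}

The canCommit round trip is what makes speculative execution safe: only one attempt per task is ever told "true", which is why the AM logs "Result of canCommit ... :true" before the attempt is allowed to promote its output.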
2015-04-28 15:10:58,826 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent.EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000021 taskAttempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,826 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: Processing the event EventType: CONTAINER_REMOTE_CLEANUP for container container_1430213948957_0001_01_000021 taskAttempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,827 INFO [ContainerLauncher #3] org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl: KILLING attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,827 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy: Opening proxy : host-IP117:64318 2015-04-28 15:10:58,827 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SecurityUtil: Acquired token Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:58,827 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.yarn.client.ServerProxy.createRetriableProxy(ServerProxy.java:87) 2015-04-28 15:10:58,828 DEBUG [ContainerLauncher #3] org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ContainerManagementProtocol 2015-04-28 15:10:58,828 DEBUG [Socket Reader #1 for port 21207] org.apache.hadoop.ipc.Server: Socket Reader #1 for port 21207: disconnecting client IP117:16365. Number of active connections: 0 2015-04-28 15:10:58,828 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@13526e59 2015-04-28 15:10:58,828 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:58,829 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Connecting to host-IP117/IP117:64318 2015-04-28 15:10:58,829 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:appattempt_1430213948957_0001_000001 (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:58,830 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:58,831 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"yanqieh/2/oton/N5PT0yAipP5UQ/HytcD4Wag4C\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } 2015-04-28 15:10:58,831 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB info:org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo$1@ffb6be6 2015-04-28 15:10:58,832 INFO [ContainerLauncher #3] org.apache.hadoop.yarn.security.NMTokenSelector: Looking for service: IP117:64318. 
Current token is Kind: NMToken, Service: IP117:64318, Ident: (appAttemptId { application_id { id: 1 cluster_timestamp: 1430213948957 } attemptId: 1 } nodeId { host: "host-IP117" port: 64318 } appSubmitter: "dsperf" keyId: 1569710345) 2015-04-28 15:10:58,832 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Creating SASL DIGEST-MD5(TOKEN) client to authenticate to service at default 2015-04-28 15:10:58,832 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Use TOKEN authentication for protocol ContainerManagementProtocolPB 2015-04-28 15:10:58,832 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting username: Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF 2015-04-28 15:10:58,832 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting userPassword 2015-04-28 15:10:58,832 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: SASL client callback: setting realm: default 2015-04-28 15:10:58,833 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE token: "charset=utf-8,username=\"Cg0KCQgBEJ3sk/vPKRABEhcKEWhvc3QtMTAtMTktOTItMTE3EL72AxoGZHNwZXJmIInCv+wF\",realm=\"default\",nonce=\"yanqieh/2/oton/N5PT0yAipP5UQ/HytcD4Wag4C\",nc=00000001,cnonce=\"f8TZja6BvuNW+J/0qEm7ASLXFN6+m7TGIuctqtW5\",digest-uri=\"/default\",maxbuf=65536,response=99af87d3a1eef5aadc3803a010466b9e,qop=auth" auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" } 2015-04-28 15:10:58,835 DEBUG [ContainerLauncher #3] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: SUCCESS token: "rspauth=6c1ada893a650acdb8641792b42c181a" 2015-04-28 15:10:58,835 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.Client: Negotiated QOP is :auth 2015-04-28 15:10:58,835 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: starting, having connections 2 2015-04-28 15:10:58,836 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 sending #88 2015-04-28 15:10:58,839 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001 got value #88 2015-04-28 15:10:58,840 DEBUG [ContainerLauncher #3] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: stopContainers took 12ms 2015-04-28 15:10:58,840 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: closed 2015-04-28 15:10:58,840 DEBUG [IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to host-IP117/IP117:64318 from appattempt_1430213948957_0001_000001: stopped, remaining connections 1 2015-04-28 15:10:58,840 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent.EventType: TA_CONTAINER_CLEANED 2015-04-28 15:10:58,840 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: Processing attempt_1430213948957_0001_r_000000_0 of type TA_CONTAINER_CLEANED 2015-04-28 15:10:58,842 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl: attempt_1430213948957_0001_r_000000_0 TaskAttempt Transitioned from SUCCESS_CONTAINER_CLEANUP to SUCCEEDED 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent.EventType: JOB_COUNTER_UPDATE 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COUNTER_UPDATE 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: REDUCE_ATTEMPT_FINISHED 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent.EventType: T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:58,842 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Processing task_1430213948957_0001_r_000000 of type T_ATTEMPT_SUCCEEDED 2015-04-28 15:10:58,842 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: Task succeeded with attempt attempt_1430213948957_0001_r_000000_0 2015-04-28 15:10:58,842 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl: task_1430213948957_0001_r_000000 Task Transitioned from RUNNING to SUCCEEDED 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent.EventType: ATTEMPT_STATUS_UPDATE 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent.EventType: JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:58,842 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_ATTEMPT_COMPLETED 2015-04-28 15:10:58,843 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent.EventType: JOB_TASK_COMPLETED 2015-04-28 15:10:58,843 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_TASK_COMPLETED 2015-04-28 15:10:58,843 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Num completed Tasks: 17 2015-04-28 15:10:58,843 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: job_1430213948957_0001Job Transitioned from RUNNING to COMMITTING 2015-04-28 15:10:58,843 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: TASK_FINISHED 2015-04-28 15:10:58,843 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.commit.CommitterJobCommitEvent.EventType: JOB_COMMIT 2015-04-28 15:10:58,843 INFO [CommitterEvent Processor #1] org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler: Processing the event EventType: JOB_COMMIT 2015-04-28 15:10:58,844 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: /staging-dir/dsperf/.staging/job_1430213948957_0001/COMMIT_STARTED: masked=rw-r--r-- 2015-04-28 15:10:58,844 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.Client: The ping interval is 60000 ms. 2015-04-28 15:10:58,844 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.Client: Connecting to /IP127:65110 2015-04-28 15:10:58,845 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:719) 2015-04-28 15:10:58,846 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: NEGOTIATE 2015-04-28 15:10:58,846 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:58,847 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=117724 lastFlushOffset=113589 createNewBlock=false 2015-04-28 15:10:58,847 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 32 2015-04-28 15:10:58,847 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 32 2015-04-28 15:10:58,847 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.SaslRpcClient: Received SASL message state: NEGOTIATE auths { method: "TOKEN" mechanism: "DIGEST-MD5" protocol: "" serverId: "default" challenge: "realm=\"default\",nonce=\"g2LfHk7Xe8fs6rcmT0nu7cjs6Os8NKIa/ZT/PUGa\",qop=\"auth\",charset=utf-8,algorithm=md5-sess" } auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:58,847 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:@org.apache.hadoop.security.token.TokenInfo(value=class org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector) 2015-04-28 15:10:58,847 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.SaslRpcClient: Use SIMPLE authentication for protocol ClientNamenodeProtocolPB 2015-04-28 15:10:58,847 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.security.SaslRpcClient: Sending sasl message state: INITIATE auths { method: "SIMPLE" mechanism: "" } 2015-04-28 15:10:58,848 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 32 offsetInBlock: 113152 lastPacketInBlock: false lastByteOffsetInBlock: 117724 2015-04-28 15:10:58,849 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending 
#89 2015-04-28 15:10:58,849 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf: starting, having connections 2 2015-04-28 15:10:58,853 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 32 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 3842048 flag: 0 flag: 0 2015-04-28 15:10:58,853 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler REDUCE_ATTEMPT_FINISHED 2015-04-28 15:10:58,853 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:58,854 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=33, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=117248 2015-04-28 15:10:58,854 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:58,854 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=120941 lastFlushOffset=117724 createNewBlock=false 2015-04-28 15:10:58,854 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 33 2015-04-28 15:10:58,854 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 33 2015-04-28 15:10:58,854 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 33 offsetInBlock: 117248 lastPacketInBlock: false lastByteOffsetInBlock: 120941 2015-04-28 15:10:58,857 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 33 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1941885 flag: 0 flag: 0 2015-04-28 15:10:58,857 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler TASK_FINISHED 2015-04-28 15:10:58,874 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #89 2015-04-28 15:10:58,874 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 30ms 2015-04-28 15:10:58,875 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/dsperf/.staging/job_1430213948957_0001/COMMIT_STARTED, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:58,875 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: -1 2015-04-28 15:10:58,875 DEBUG [LeaseRenewer:dsperf@hacluster] org.apache.hadoop.hdfs.LeaseRenewer: Lease renewer daemon for [DFSClient_NONMAPREDUCE_-511098579_1] with renew id 1 started 2015-04-28 15:10:58,876 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf 
sending #90 2015-04-28 15:10:58,899 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #90 2015-04-28 15:10:58,899 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 24ms 2015-04-28 15:10:58,903 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #91 2015-04-28 15:10:58,904 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #91 2015-04-28 15:10:58,904 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getListing took 1ms 2015-04-28 15:10:58,909 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Merging data from FileStatus{path=hdfs://hacluster/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/_temporary/1/task_1430213948957_0001_r_000000; isDirectory=true; modification_time=1430214048232; access_time=0; owner=dsperf; group=supergroup; permission=rwxr-xr-x; isSymlink=false} to hdfs://hacluster/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out 2015-04-28 15:10:58,909 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #92 2015-04-28 15:10:58,910 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #92 2015-04-28 15:10:58,910 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:58,911 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #93 2015-04-28 15:10:58,912 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #93 2015-04-28 15:10:58,912 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getListing took 1ms 2015-04-28 15:10:58,912 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Merging data from FileStatus{path=hdfs://hacluster/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/_temporary/1/task_1430213948957_0001_r_000000/part-r-00000; isDirectory=false; length=97; replication=3; blocksize=33554432; modification_time=1430214048691; access_time=1430214048232; owner=dsperf; group=supergroup; permission=rw-r--r--; isSymlink=false} to hdfs://hacluster/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/part-r-00000 2015-04-28 15:10:58,913 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #94 2015-04-28 15:10:58,914 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #94 2015-04-28 15:10:58,914 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:58,916 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf 
sending #95 2015-04-28 15:10:58,933 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #95 2015-04-28 15:10:58,933 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: rename took 17ms 2015-04-28 15:10:58,937 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #96 2015-04-28 15:10:58,968 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #96 2015-04-28 15:10:58,968 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: delete took 31ms 2015-04-28 15:10:58,970 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: /user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/_SUCCESS: masked=rw-r--r-- 2015-04-28 15:10:58,971 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #97 2015-04-28 15:10:58,991 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #97 2015-04-28 15:10:58,991 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 21ms 2015-04-28 15:10:58,991 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out/_SUCCESS, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:58,991 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: -1 2015-04-28 15:10:58,992 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #98 2015-04-28 15:10:59,016 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #98 2015-04-28 15:10:59,016 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 24ms 2015-04-28 15:10:59,016 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: /staging-dir/dsperf/.staging/job_1430213948957_0001/COMMIT_SUCCESS: masked=rw-r--r-- 2015-04-28 15:10:59,017 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #99 2015-04-28 15:10:59,041 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #99 2015-04-28 15:10:59,041 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 25ms 2015-04-28 15:10:59,041 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/dsperf/.staging/job_1430213948957_0001/COMMIT_SUCCESS, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,041 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: -1 2015-04-28 15:10:59,042 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from 
dsperf sending #100 2015-04-28 15:10:59,066 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #100 2015-04-28 15:10:59,066 DEBUG [CommitterEvent Processor #1] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 24ms 2015-04-28 15:10:59,066 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobCommitCompletedEvent.EventType: JOB_COMMIT_COMPLETED 2015-04-28 15:10:59,067 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Processing job_1430213948957_0001 of type JOB_COMMIT_COMPLETED 2015-04-28 15:10:59,069 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: Calling handler for JobFinishedEvent 2015-04-28 15:10:59,070 INFO [AsyncDispatcher event handler] org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl: job_1430213948957_0001Job Transitioned from COMMITTING to SUCCEEDED 2015-04-28 15:10:59,070 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent.EventType: JOB_FINISHED 2015-04-28 15:10:59,070 DEBUG [AsyncDispatcher event handler] org.apache.hadoop.yarn.event.AsyncDispatcher: Dispatching the event org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent.EventType: STATE_CHANGED 2015-04-28 15:10:59,070 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Writing event 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: We are finishing cleanly so this is the last retry 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Notify RMCommunicator isAMLastRetry: true 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: RMCommunicator notified that shouldUnregistered is: true 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Notify JHEH isAMLastRetry: true 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: JobHistoryEventHandler notified that forceJobCompletion is true 2015-04-28 15:10:59,071 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Calling stop for all the services 2015-04-28 15:10:59,071 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster entered state STOPPED 2015-04-28 15:10:59,071 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: org.apache.hadoop.mapreduce.v2.app.MRAppMaster: stopping services, size=7 2015-04-28 15:10:59,071 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #6: Service JobHistoryEventHandler in state JobHistoryEventHandler: STARTED 2015-04-28 15:10:59,072 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: JobHistoryEventHandler entered state STOPPED 2015-04-28 15:10:59,073 INFO [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Stopping JobHistoryEventHandler. 
Size of the outstanding queue size is 0 2015-04-28 15:10:59,075 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=34, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=120832 2015-04-28 15:10:59,076 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Flushing Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:59,076 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient flush(): bytesCurBlock=131288 lastFlushOffset=120941 createNewBlock=false 2015-04-28 15:10:59,076 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 34 2015-04-28 15:10:59,076 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 34 2015-04-28 15:10:59,077 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 34 offsetInBlock: 120832 lastPacketInBlock: false lastByteOffsetInBlock: 131288 2015-04-28 15:10:59,080 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 34 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 2289342 flag: 0 flag: 0 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: In HistoryEventHandler JOB_FINISHED 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Closing Writer 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=35, src=/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist, packetSize=65016, chunksPerPacket=126, bytesCurBlock=131072 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 35 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 36 2015-04-28 15:10:59,081 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 36 2015-04-28 15:10:59,081 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 35 offsetInBlock: 131072 lastPacketInBlock: false lastByteOffsetInBlock: 131288 2015-04-28 15:10:59,082 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 35 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 957138 flag: 0 flag: 0 2015-04-28 15:10:59,083 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741846_1022 sending packet packet seqno: 36 offsetInBlock: 131288 
lastPacketInBlock: true lastByteOffsetInBlock: 131288 2015-04-28 15:10:59,086 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 36 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1683428 flag: 0 flag: 0 2015-04-28 15:10:59,086 DEBUG [DataStreamer for file /staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist block BP-340492689-IP127-1430213926415:blk_1073741846_1022] org.apache.hadoop.hdfs.DFSClient: Closing old block BP-340492689-IP127-1430213926415:blk_1073741846_1022 2015-04-28 15:10:59,087 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #101 2015-04-28 15:10:59,107 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #101 2015-04-28 15:10:59,107 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 21ms 2015-04-28 15:10:59,108 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp: masked=rw-r--r-- 2015-04-28 15:10:59,109 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #102 2015-04-28 15:10:59,132 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #102 2015-04-28 15:10:59,133 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 25ms 2015-04-28 15:10:59,133 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,135 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=0, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=0 2015-04-28 15:10:59,135 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 0 2015-04-28 15:10:59,135 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 1 2015-04-28 15:10:59,135 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 1 2015-04-28 15:10:59,135 DEBUG [Thread-100] org.apache.hadoop.hdfs.DFSClient: Allocating new block 2015-04-28 15:10:59,135 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #103 2015-04-28 15:10:59,142 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #194 2015-04-28 15:10:59,142 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#194 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:59,143 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:59,143 DEBUG [IPC Server handler 0 on 48332] 
org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:10:59,143 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#194 Retry#0 2015-04-28 15:10:59,143 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#194 Retry#0 Wrote 99 bytes. 2015-04-28 15:10:59,145 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #195 2015-04-28 15:10:59,145 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#195 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:59,145 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:59,146 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 1 2015-04-28 15:10:59,146 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#195 Retry#0 2015-04-28 15:10:59,146 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#195 Retry#0 Wrote 272 bytes. 2015-04-28 15:10:59,148 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #196 2015-04-28 15:10:59,148 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#196 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:10:59,148 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:10:59,148 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:10:59,148 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#196 Retry#0 2015-04-28 15:10:59,148 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#196 Retry#0 Wrote 272 bytes. 
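
The CommitterEvent Processor entries above (getListing, rename, delete, then create/complete for _SUCCESS and COMMIT_SUCCESS) trace FileOutputCommitter's commitJob: each completed task's output under out/_temporary/1/task_1430213948957_0001_r_000000 is merged into the final output directory, the _temporary tree is removed, and empty marker files are written. The sketch below reproduces that sequence directly against the FileSystem API, using the paths from the log; it is an illustration of the observed RPCs, not the committer's own code, and it flattens the recursive merge into a single listStatus/rename pass.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CommitSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());

            // Paths as they appear in the log above.
            Path out = new Path("hdfs://hacluster/user/dsperf/QuasiMonteCarlo_1430213999816_1165157787/out");
            Path attempt = new Path(out, "_temporary/1/task_1430213948957_0001_r_000000");

            // "Merging data from FileStatus{...}": move each committed task file
            // (e.g. part-r-00000) into the job's final output directory.
            for (FileStatus stat : fs.listStatus(attempt)) {                         // getListing RPC
                fs.rename(stat.getPath(), new Path(out, stat.getPath().getName()));  // rename RPC
            }

            // "Call: delete took 31ms": remove the _temporary working tree.
            fs.delete(new Path(out, "_temporary"), true);

            // "/.../out/_SUCCESS: masked=rw-r--r--": an empty marker file signals a
            // successful commit to downstream readers (create + complete RPCs).
            fs.create(new Path(out, "_SUCCESS")).close();
            fs.close();
        }
    }
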
2015-04-28 15:10:59,158 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #103 2015-04-28 15:10:59,158 DEBUG [Thread-100] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: addBlock took 23ms 2015-04-28 15:10:59,158 DEBUG [Thread-100] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,158 DEBUG [Thread-100] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK] 2015-04-28 15:10:59,158 DEBUG [Thread-100] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:59,158 DEBUG [Thread-100] org.apache.hadoop.hdfs.DFSClient: Send buf size 131072 2015-04-28 15:10:59,159 DEBUG [Thread-100] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,164 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp block BP-340492689-IP127-1430213926415:blk_1073741849_1025] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741849_1025 sending packet packet seqno: 0 offsetInBlock: 0 lastPacketInBlock: false lastByteOffsetInBlock: 355 2015-04-28 15:10:59,165 DEBUG [IPC Server idle connection scanner for port 21207] org.apache.hadoop.ipc.Server: IPC Server idle connection scanner for port 21207: task running 2015-04-28 15:10:59,167 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741849_1025] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 0 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1491412 flag: 0 flag: 0 2015-04-28 15:10:59,168 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp block BP-340492689-IP127-1430213926415:blk_1073741849_1025] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741849_1025 sending packet packet seqno: 1 offsetInBlock: 355 lastPacketInBlock: true lastByteOffsetInBlock: 355 2015-04-28 15:10:59,171 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741849_1025] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 1 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1239552 flag: 0 flag: 0 2015-04-28 15:10:59,171 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp block BP-340492689-IP127-1430213926415:blk_1073741849_1025] org.apache.hadoop.hdfs.DFSClient: Closing old block BP-340492689-IP127-1430213926415:blk_1073741849_1025 2015-04-28 15:10:59,172 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #104 2015-04-28 15:10:59,191 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #104 2015-04-28 15:10:59,191 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 20ms 2015-04-28 15:10:59,191 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 
from dsperf sending #105 2015-04-28 15:10:59,207 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #105 2015-04-28 15:10:59,207 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: setPermission took 16ms 2015-04-28 15:10:59,222 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #106 2015-04-28 15:10:59,223 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #106 2015-04-28 15:10:59,223 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:59,224 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Copying hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist to hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp 2015-04-28 15:10:59,224 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #107 2015-04-28 15:10:59,225 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #107 2015-04-28 15:10:59,225 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:59,230 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #108 2015-04-28 15:10:59,231 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #108 2015-04-28 15:10:59,232 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:59,232 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #109 2015-04-28 15:10:59,233 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #109 2015-04-28 15:10:59,233 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:59,234 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #110 2015-04-28 15:10:59,235 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #110 2015-04-28 15:10:59,235 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getBlockLocations took 2ms 2015-04-28 15:10:59,236 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: newInfo = LocatedBlocks{ fileLength=131288 underConstruction=false blocks=[LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741846_1022; getBlockSize()=131288; corrupt=false; offset=0; 
locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]}] lastLocatedBlock=LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741846_1022; getBlockSize()=131288; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]} isLastBlockComplete=true} 2015-04-28 15:10:59,236 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp: masked=rw-r--r-- 2015-04-28 15:10:59,237 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #111 2015-04-28 15:10:59,257 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #111 2015-04-28 15:10:59,257 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 21ms 2015-04-28 15:10:59,258 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,258 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:59,259 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,261 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=0, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=0 2015-04-28 15:10:59,261 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk packet full seqno=0, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, bytesCurBlock=64512, blockSize=33554432, appendChunk=false 2015-04-28 15:10:59,261 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 0 2015-04-28 15:10:59,261 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,261 DEBUG [Thread-102] org.apache.hadoop.hdfs.DFSClient: Allocating new block 2015-04-28 15:10:59,261 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #112 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] 
org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=1, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=64512 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk packet full seqno=1, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, bytesCurBlock=129024, blockSize=33554432, appendChunk=false 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 1 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=2, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=129024 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 2 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 3 2015-04-28 15:10:59,262 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 3 2015-04-28 15:10:59,282 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #112 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: addBlock took 22ms 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK] 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.hdfs.DFSClient: Send buf size 131072 2015-04-28 15:10:59,283 DEBUG [Thread-102] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,290 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741850_1026 sending packet packet seqno: 0 offsetInBlock: 0 lastPacketInBlock: false lastByteOffsetInBlock: 64512 2015-04-28 15:10:59,291 DEBUG [DataStreamer for file 
/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741850_1026 sending packet packet seqno: 1 offsetInBlock: 64512 lastPacketInBlock: false lastByteOffsetInBlock: 129024 2015-04-28 15:10:59,291 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741850_1026 sending packet packet seqno: 2 offsetInBlock: 129024 lastPacketInBlock: false lastByteOffsetInBlock: 131288 2015-04-28 15:10:59,301 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 0 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 7875899 flag: 0 flag: 0 2015-04-28 15:10:59,305 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 1 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 12850332 flag: 0 flag: 0 2015-04-28 15:10:59,305 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 2 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 5062625 flag: 0 flag: 0 2015-04-28 15:10:59,305 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741850_1026 sending packet packet seqno: 3 offsetInBlock: 131288 lastPacketInBlock: true lastByteOffsetInBlock: 131288 2015-04-28 15:10:59,310 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 3 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 2132707 flag: 0 flag: 0 2015-04-28 15:10:59,310 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp block BP-340492689-IP127-1430213926415:blk_1073741850_1026] org.apache.hadoop.hdfs.DFSClient: Closing old block BP-340492689-IP127-1430213926415:blk_1073741850_1026 2015-04-28 15:10:59,311 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #113 2015-04-28 15:10:59,332 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #113 2015-04-28 15:10:59,332 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 22ms 2015-04-28 15:10:59,333 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Copied to done location: 
hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp 2015-04-28 15:10:59,333 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #114 2015-04-28 15:10:59,357 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #114 2015-04-28 15:10:59,357 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: setPermission took 24ms 2015-04-28 15:10:59,358 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #115 2015-04-28 15:10:59,360 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #115 2015-04-28 15:10:59,360 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:59,360 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Copying hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1_conf.xml to hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp 2015-04-28 15:10:59,361 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #116 2015-04-28 15:10:59,362 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #116 2015-04-28 15:10:59,362 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:59,362 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #117 2015-04-28 15:10:59,364 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #117 2015-04-28 15:10:59,364 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 2ms 2015-04-28 15:10:59,364 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #118 2015-04-28 15:10:59,365 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #118 2015-04-28 15:10:59,365 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms 2015-04-28 15:10:59,366 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #119 2015-04-28 15:10:59,368 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #119 2015-04-28 15:10:59,368 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: getBlockLocations took 2ms 2015-04-28 15:10:59,369 DEBUG [eventHandlingThread] 
org.apache.hadoop.hdfs.DFSClient: newInfo = LocatedBlocks{ fileLength=119732 underConstruction=false blocks=[LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741845_1021; getBlockSize()=119732; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]}] lastLocatedBlock=LocatedBlock{BP-340492689-IP127-1430213926415:blk_1073741845_1021; getBlockSize()=119732; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK], DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK]]} isLastBlockComplete=true} 2015-04-28 15:10:59,369 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp: masked=rw-r--r-- 2015-04-28 15:10:59,369 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #120 2015-04-28 15:10:59,391 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #120 2015-04-28 15:10:59,391 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: create took 22ms 2015-04-28 15:10:59,391 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,392 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:59,394 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=0, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=0 2015-04-28 15:10:59,395 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk packet full seqno=0, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp, bytesCurBlock=64512, blockSize=33554432, appendChunk=false 2015-04-28 15:10:59,395 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 0 2015-04-28 15:10:59,395 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: computePacketChunkSize: src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp, chunkSize=516, chunksPerPacket=126, packetSize=65016 2015-04-28 15:10:59,395 DEBUG [Thread-104] org.apache.hadoop.hdfs.DFSClient: Allocating new block 2015-04-28 15:10:59,395 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #121 2015-04-28 15:10:59,396 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: DFSClient writeChunk allocating new packet seqno=1, src=/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp, packetSize=65016, chunksPerPacket=126, bytesCurBlock=64512 2015-04-28 15:10:59,397 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 1 2015-04-28 15:10:59,397 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Queued packet 2 2015-04-28 15:10:59,397 DEBUG [eventHandlingThread] org.apache.hadoop.hdfs.DFSClient: Waiting for ack for: 2 
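
The repeated computePacketChunkSize lines in this stretch of the log (chunkSize=516, chunksPerPacket=126, packetSize=65016) follow from the client defaults: 512 data bytes plus a 4-byte checksum per chunk, and as many whole chunks per packet as fit in the default 64 KiB write-packet budget once room for the packet header is set aside. A quick arithmetic check, with the header allowance marked as an assumption:

    public class PacketSizeCheck {
        public static void main(String[] args) {
            int bytesPerChecksum = 512;        // dfs.bytes-per-checksum default
            int checksumLen      = 4;          // 4-byte CRC checksum per chunk
            int writePacketSize  = 65536;      // dfs.client-write-packet-size default (64 KiB)
            int headerAllowance  = 33;         // assumed maximum packet header length

            int chunkSize       = bytesPerChecksum + checksumLen;                  // 516
            int chunksPerPacket = (writePacketSize - headerAllowance) / chunkSize; // 126
            int packetSize      = chunkSize * chunksPerPacket;                     // 65016

            // Matches the DFSClient lines above:
            // chunkSize=516, chunksPerPacket=126, packetSize=65016
            System.out.printf("chunkSize=%d, chunksPerPacket=%d, packetSize=%d%n",
                    chunkSize, chunksPerPacket, packetSize);
        }
    }
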
2015-04-28 15:10:59,416 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #121 2015-04-28 15:10:59,416 DEBUG [Thread-104] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: addBlock took 21ms 2015-04-28 15:10:59,416 DEBUG [Thread-104] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,416 DEBUG [Thread-104] org.apache.hadoop.hdfs.DFSClient: pipeline = DatanodeInfoWithStorage[IP117:50076,DS-2ce3e8ee-88fe-4907-bb67-a0731b910895,DISK] 2015-04-28 15:10:59,417 DEBUG [Thread-104] org.apache.hadoop.hdfs.DFSClient: Connecting to datanode IP143:50076 2015-04-28 15:10:59,417 DEBUG [Thread-104] org.apache.hadoop.hdfs.DFSClient: Send buf size 131072 2015-04-28 15:10:59,417 DEBUG [Thread-104] org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient: SASL client skipping handshake in unsecured configuration for addr = /IP143, datanodeId = DatanodeInfoWithStorage[IP143:50076,DS-bcd3c7ab-29f3-4aad-8c9d-854f1042a31a,DISK] 2015-04-28 15:10:59,419 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #122 2015-04-28 15:10:59,424 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741851_1027 sending packet packet seqno: 0 offsetInBlock: 0 lastPacketInBlock: false lastByteOffsetInBlock: 64512 2015-04-28 15:10:59,424 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741851_1027 sending packet packet seqno: 1 offsetInBlock: 64512 lastPacketInBlock: false lastByteOffsetInBlock: 119732 2015-04-28 15:10:59,426 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #122 2015-04-28 15:10:59,426 DEBUG [LeaseRenewer:dsperf@hacluster:8020] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: renewLease took 7ms 2015-04-28 15:10:59,427 DEBUG [LeaseRenewer:dsperf@hacluster:8020] org.apache.hadoop.hdfs.LeaseRenewer: Lease renewed for client DFSClient_NONMAPREDUCE_-704522727_1 2015-04-28 15:10:59,427 DEBUG [LeaseRenewer:dsperf@hacluster:8020] org.apache.hadoop.hdfs.LeaseRenewer: Lease renewer daemon for [DFSClient_NONMAPREDUCE_-704522727_1] with renew id 1 executed 2015-04-28 15:10:59,433 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 0 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 8028066 flag: 0 flag: 0 2015-04-28 15:10:59,437 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 1 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 11409203 flag: 0 flag: 0 2015-04-28 15:10:59,437 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: 
DataStreamer block BP-340492689-IP127-1430213926415:blk_1073741851_1027 sending packet packet seqno: 2 offsetInBlock: 119732 lastPacketInBlock: true lastByteOffsetInBlock: 119732 2015-04-28 15:10:59,442 DEBUG [ResponseProcessor for block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: DFSClient seqno: 2 reply: SUCCESS reply: SUCCESS downstreamAckTimeNanos: 1845869 flag: 0 flag: 0 2015-04-28 15:10:59,442 DEBUG [DataStreamer for file /staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp block BP-340492689-IP127-1430213926415:blk_1073741851_1027] org.apache.hadoop.hdfs.DFSClient: Closing old block BP-340492689-IP127-1430213926415:blk_1073741851_1027 2015-04-28 15:10:59,442 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #123 2015-04-28 15:10:59,466 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #123 2015-04-28 15:10:59,466 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: complete took 24ms 2015-04-28 15:10:59,466 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Copied to done location: hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp 2015-04-28 15:10:59,466 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #124 2015-04-28 15:10:59,482 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #124 2015-04-28 15:10:59,482 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: setPermission took 16ms 2015-04-28 15:10:59,483 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #125 2015-04-28 15:10:59,507 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #125 2015-04-28 15:10:59,507 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: rename took 24ms 2015-04-28 15:10:59,508 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Moved tmp to done: hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary_tmp to hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001.summary 2015-04-28 15:10:59,508 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #126 2015-04-28 15:10:59,532 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #126 2015-04-28 15:10:59,533 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: rename took 25ms 2015-04-28 15:10:59,533 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Moved tmp to done: hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml_tmp to 
hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001_conf.xml 2015-04-28 15:10:59,534 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #127 2015-04-28 15:10:59,557 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #127 2015-04-28 15:10:59,558 DEBUG [eventHandlingThread] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: rename took 25ms 2015-04-28 15:10:59,558 INFO [eventHandlingThread] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Moved tmp to done: hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist_tmp to hdfs://hacluster:8020/staging-dir/history/done_intermediate/dsperf/job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist 2015-04-28 15:10:59,558 DEBUG [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Interrupting Event Handling thread 2015-04-28 15:10:59,558 DEBUG [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Waiting for Event Handling thread to complete 2015-04-28 15:10:59,558 DEBUG [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Shutting down timer for Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:59,558 DEBUG [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Shutting down timer Job MetaInfo for job_1430213948957_0001 history file hdfs://hacluster:8020/staging-dir/dsperf/.staging/job_1430213948957_0001/job_1430213948957_0001_1.jhist 2015-04-28 15:10:59,559 DEBUG [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Closing Writer 2015-04-28 15:10:59,559 INFO [Thread-99] org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler: Stopped JobHistoryEventHandler. 
super.stop() 2015-04-28 15:10:59,559 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #5: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter in state org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter: STARTED 2015-04-28 15:10:59,559 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerLauncherRouter entered state STOPPED 2015-04-28 15:10:59,559 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl entered state STOPPED 2015-04-28 15:10:59,560 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #4: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter in state org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter: STARTED 2015-04-28 15:10:59,560 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter entered state STOPPED 2015-04-28 15:10:59,560 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: RMCommunicator entered state STOPPED 2015-04-28 15:10:59,562 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: Setting job diagnostics to 2015-04-28 15:10:59,564 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: History url is http://host-128:64322/jobhistory/job/job_1430213948957_0001 2015-04-28 15:10:59,569 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #128 2015-04-28 15:10:59,582 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #128 2015-04-28 15:10:59,583 DEBUG [Thread-99] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: finishApplicationMaster took 14ms 2015-04-28 15:10:59,585 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: Waiting for application to be successfully unregistered. 2015-04-28 15:11:00,151 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #197 2015-04-28 15:11:00,151 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#197 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:11:00,151 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:11:00,152 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getTaskAttemptCompletionEvents queueTime= 1 procesingTime= 0 2015-04-28 15:11:00,152 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#197 Retry#0 2015-04-28 15:11:00,152 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getTaskAttemptCompletionEvents from IP117:43359 Call#197 Retry#0 Wrote 33 bytes. 
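
The "Moved tmp to done" entries just above show the done_intermediate files taking their final names. The long .jhist name encodes a one-line summary of the job: jobId-submitTime-user-jobName-finishTime-numMaps-numReduces-state-queue-startTime, which here reads as 16 maps, 1 reduce, SUCCEEDED, queue root.default. The decoder below is illustrative only; it assumes no field contains an embedded '-', whereas the real history file name builder escapes its delimiters.

    // Illustrative only: splits the done-file name from the log on '-' and
    // labels each field; not the parser the JobHistoryServer actually uses.
    public class JhistNameSketch {
        public static void main(String[] args) {
            String name = "job_1430213948957_0001-1430214013122-dsperf-QuasiMonteCarlo-"
                        + "1430214059067-16-1-SUCCEEDED-root.default-1430214029339.jhist";
            String[] f = name.substring(0, name.length() - ".jhist".length()).split("-");

            System.out.println("jobId      = " + f[0]);             // job_1430213948957_0001
            System.out.println("submitTime = " + f[1]);             // 1430214013122 (epoch ms)
            System.out.println("user       = " + f[2]);             // dsperf
            System.out.println("jobName    = " + f[3]);             // QuasiMonteCarlo
            System.out.println("finishTime = " + f[4]);             // 1430214059067
            System.out.println("maps/reds  = " + f[5] + "/" + f[6]); // 16/1
            System.out.println("state      = " + f[7]);             // SUCCEEDED
            System.out.println("queue      = " + f[8]);             // root.default
            System.out.println("startTime  = " + f[9]);             // 1430214029339
        }
    }
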
2015-04-28 15:11:00,154 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #198 2015-04-28 15:11:00,154 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#198 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:11:00,154 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:11:00,154 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:11:00,154 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#198 Retry#0 2015-04-28 15:11:00,154 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#198 Retry#0 Wrote 272 bytes. 2015-04-28 15:11:00,156 DEBUG [Socket Reader #1 for port 48332] org.apache.hadoop.ipc.Server: got #199 2015-04-28 15:11:00,156 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#199 Retry#0 for RpcKind RPC_PROTOCOL_BUFFER 2015-04-28 15:11:00,156 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.security.UserGroupInformation: PrivilegedAction as:dsperf (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043) 2015-04-28 15:11:00,156 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: Served: getJobReport queueTime= 0 procesingTime= 0 2015-04-28 15:11:00,156 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#199 Retry#0 2015-04-28 15:11:00,156 DEBUG [IPC Server handler 0 on 48332] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 48332: responding to org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB.getJobReport from IP117:43359 Call#199 Retry#0 Wrote 272 bytes. 
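
The two finishApplicationMaster calls above, separated by the "Waiting for application to be successfully unregistered" poll, are the AM's final handshake with the ResourceManager: it reports the final status, an (empty) diagnostics string, and the job-history URL, then retries until the RM confirms the unregistration. A minimal sketch of that step through the public AMRMClient API, with the values taken from the log and the surrounding client setup assumed:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.client.api.AMRMClient;

    public class UnregisterSketch {
        public static void main(String[] args) throws Exception {
            // Assumed setup: in the real AM this client was registered earlier and
            // used for container allocation throughout the job.
            AMRMClient<AMRMClient.ContainerRequest> amRmClient = AMRMClient.createAMRMClient();
            amRmClient.init(new Configuration());
            amRmClient.start();

            // Equivalent of the finishApplicationMaster RPC seen in the log:
            // final status, job diagnostics (empty here), and the history URL.
            amRmClient.unregisterApplicationMaster(
                    FinalApplicationStatus.SUCCEEDED,
                    "",
                    "http://host-128:64322/jobhistory/job/job_1430213948957_0001");

            amRmClient.stop();
        }
    }
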
2015-04-28 15:11:00,585 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf sending #129 2015-04-28 15:11:00,588 DEBUG [IPC Client (1139814130) connection to /IP127:45017 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:45017 from dsperf got value #129 2015-04-28 15:11:00,588 DEBUG [Thread-99] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: finishApplicationMaster took 3ms 2015-04-28 15:11:00,588 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Final Stats: PendingReds:0 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:0 AssignedReds:1 CompletedMaps:16 CompletedReds:0 ContAlloc:18 ContRel:1 HostLocal:16 RackLocal:0 2015-04-28 15:11:00,588 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #3: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService in state org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService: STARTED 2015-04-28 15:11:00,588 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapreduce.v2.app.MRAppMaster$StagingDirCleaningService entered state STOPPED 2015-04-28 15:11:00,589 INFO [Thread-99] org.apache.hadoop.mapreduce.v2.app.MRAppMaster: Deleting staging directory hdfs://hacluster /staging-dir/dsperf/.staging/job_1430213948957_0001 2015-04-28 15:11:00,590 DEBUG [IPC Parameter Sending Thread #0] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf sending #130 2015-04-28 15:11:00,609 DEBUG [IPC Client (1139814130) connection to /IP127:65110 from dsperf] org.apache.hadoop.ipc.Client: IPC Client (1139814130) connection to /IP127:65110 from dsperf got value #130 2015-04-28 15:11:00,609 DEBUG [Thread-99] org.apache.hadoop.ipc.ProtobufRpcEngine: Call: delete took 19ms 2015-04-28 15:11:00,609 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #2: Service org.apache.hadoop.mapred.TaskAttemptListenerImpl in state org.apache.hadoop.mapred.TaskAttemptListenerImpl: STARTED 2015-04-28 15:11:00,609 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: org.apache.hadoop.mapred.TaskAttemptListenerImpl entered state STOPPED 2015-04-28 15:11:00,609 INFO [Thread-99] org.apache.hadoop.ipc.Server: Stopping server on 21207 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 1 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 1 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 3 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 3 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 2 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 2 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 0 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 0 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 4 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 4 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 5 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 5 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 6 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 6 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 7 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 7 on 21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 8 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 8 on 
21207: exiting 2015-04-28 15:11:00,609 DEBUG [IPC Server handler 9 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 9 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 10 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 10 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 12 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 12 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 11 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 11 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 13 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 13 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 14 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 14 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 15 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 15 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 16 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 16 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 17 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 17 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 18 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 18 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 19 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 19 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 20 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 20 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 21 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 21 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 22 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 22 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 23 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 23 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 24 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 24 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 25 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 25 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 26 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 26 on 21207: exiting 2015-04-28 15:11:00,610 DEBUG [IPC Server handler 27 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 27 on 21207: exiting 2015-04-28 15:11:00,611 DEBUG [IPC Server handler 28 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 28 on 21207: exiting 2015-04-28 15:11:00,611 DEBUG [IPC Server handler 29 on 21207] org.apache.hadoop.ipc.Server: IPC Server handler 29 on 21207: exiting 2015-04-28 15:11:00,611 INFO [IPC Server listener on 21207] org.apache.hadoop.ipc.Server: Stopping IPC Server listener on 21207 2015-04-28 15:11:00,611 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: org.apache.hadoop.mapred.TaskAttemptListenerImpl: stopping services, size=1 2015-04-28 15:11:00,611 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #0: Service TaskHeartbeatHandler in state TaskHeartbeatHandler: STARTED 2015-04-28 15:11:00,611 DEBUG [IPC Server Responder] org.apache.hadoop.ipc.Server: Checking for old call responses. 
2015-04-28 15:11:00,611 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: TaskHeartbeatHandler entered state STOPPED 2015-04-28 15:11:00,611 INFO [IPC Server Responder] org.apache.hadoop.ipc.Server: Stopping IPC Server Responder 2015-04-28 15:11:00,612 INFO [TaskHeartbeatHandler PingChecker] org.apache.hadoop.mapreduce.v2.app.TaskHeartbeatHandler: TaskHeartbeatHandler thread interrupted 2015-04-28 15:11:00,612 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #1: Service CommitterEventHandler in state CommitterEventHandler: STARTED 2015-04-28 15:11:00,612 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: CommitterEventHandler entered state STOPPED 2015-04-28 15:11:00,612 DEBUG [Thread-99] org.apache.hadoop.service.CompositeService: Stopping service #0: Service Dispatcher in state Dispatcher: STARTED 2015-04-28 15:11:00,612 DEBUG [Thread-99] org.apache.hadoop.service.AbstractService: Service: Dispatcher entered state STOPPED
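
The shutdown tail, from "Stopping service #6" down to the Dispatcher entering STOPPED, is the CompositeService pattern at work: child services are added during init in start order and are stopped in reverse order of addition, so the JobHistoryEventHandler goes first and the event dispatcher last. A minimal sketch of that pattern, with placeholder children standing in for the MRAppMaster's real services:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.service.AbstractService;
    import org.apache.hadoop.service.CompositeService;

    public class ShutdownOrderSketch {
        // Placeholder child service; the real children are the Dispatcher,
        // CommitterEventHandler, TaskAttemptListener, JobHistoryEventHandler, etc.
        static class Child extends AbstractService {
            Child(String name) { super(name); }
            @Override protected void serviceStop() throws Exception {
                System.out.println("stopping " + getName());
                super.serviceStop();
            }
        }

        public static void main(String[] args) {
            CompositeService parent = new CompositeService("MRAppMasterSketch") {
                @Override protected void serviceInit(Configuration conf) throws Exception {
                    addService(new Child("Dispatcher"));             // service #0
                    addService(new Child("CommitterEventHandler"));  // service #1
                    addService(new Child("JobHistoryEventHandler")); // added last
                    super.serviceInit(conf);
                }
            };
            parent.init(new Configuration());
            parent.start();
            parent.stop();  // children stop in reverse order: JHEH, committer, dispatcher
        }
    }
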