Task Logs: 'attempt_201310110032_0023_r_000000_0'



stdout logs



stderr logs
org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:240)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.closeOp(FileSinkOperator.java:859)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:588)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.close(ExecReducer.java:309)
	at org.apache.hadoop.io.IOUtils.cleanup(IOUtils.java:232)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:539)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$1.run(SecureClient.java:263)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.handleSaslConnectionFailure(SecureClient.java:224)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:313)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	... 14 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	... 34 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 51 more
org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:240)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.closeOp(FileSinkOperator.java:859)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:588)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.close(ExecReducer.java:309)
	at org.apache.hadoop.io.IOUtils.cleanup(IOUtils.java:232)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:539)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$1.run(SecureClient.java:263)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.handleSaslConnectionFailure(SecureClient.java:224)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:313)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	... 14 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	... 34 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 51 more



syslog logs
2013-10-11 21:37:31,884 INFO org.apache.hadoop.util.NativeCodeLoader: Loaded the native-hadoop library
2013-10-11 21:37:31,987 INFO org.apache.hadoop.mapred.TaskRunner: Creating symlink: /grid/4/hdp/mapred/local/taskTracker/hrt_qa/distcache/7598786190354159294_-557903292_695434595/hor18n34.gq1.ygridcore.net/tmp/hive-hrt_qa/hive_2013-10-11_21-36-56_411_6323434890121138621/-mr-10001/6b7521be-67fb-41b2-9755-610ca30e515c <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/HIVE_PLAN6b7521be-67fb-41b2-9755-610ca30e515c
2013-10-11 21:37:31,992 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/hive-exec-log4j.properties <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/hive-exec-log4j.properties
2013-10-11 21:37:31,994 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/javax <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/javax
2013-10-11 21:37:31,996 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/job.jar <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/job.jar
2013-10-11 21:37:31,997 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/javaewah <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/javaewah
2013-10-11 21:37:31,999 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/com <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/com
2013-10-11 21:37:32,000 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/org <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/org
2013-10-11 21:37:32,002 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/javolution <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/javolution
2013-10-11 21:37:32,004 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/META-INF <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/META-INF
2013-10-11 21:37:32,005 INFO org.apache.hadoop.filecache.TrackerDistributedCacheManager: Creating symlink: /grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/.job.jar.crc <- /grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/.job.jar.crc
2013-10-11 21:37:32,238 INFO org.apache.hadoop.util.ProcessTree: setsid exited with exit code 0
2013-10-11 21:37:32,243 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorPlugin : org.apache.hadoop.util.LinuxResourceCalculatorPlugin@b3e15f7
2013-10-11 21:37:32,353 WARN org.apache.hadoop.io.compress.snappy.LoadSnappy: Snappy native library is available
2013-10-11 21:37:32,353 INFO org.apache.hadoop.io.compress.snappy.LoadSnappy: Snappy native library loaded
2013-10-11 21:37:32,362 INFO org.apache.hadoop.mapred.ReduceTask: ShuffleRamManager: MemoryLimit=456733472, MaxSingleShuffleLimit=114183368
2013-10-11 21:37:32,367 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,367 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,369 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,369 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,372 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,372 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,373 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,373 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,374 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,375 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,376 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,378 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,378 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,379 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,379 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,380 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,380 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,381 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,381 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,382 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,382 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor
2013-10-11 21:37:32,385 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Thread started: Thread for merging on-disk files
2013-10-11 21:37:32,385 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Thread waiting: Thread for merging on-disk files
2013-10-11 21:37:32,385 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Thread started: Thread for merging in memory files
2013-10-11 21:37:32,386 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Need another 9 map output(s) where 0 is already in progress
2013-10-11 21:37:32,387 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Scheduled 0 outputs (0 slow hosts and 0 dup hosts)
2013-10-11 21:37:32,387 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Thread started: Thread for polling Map Completion Events
2013-10-11 21:37:37,387 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Scheduled 3 outputs (0 slow hosts and 0 dup hosts)
2013-10-11 21:37:37,534 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Scheduled 2 outputs (0 slow hosts and 0 dup hosts)
2013-10-11 21:37:37,544 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Scheduled 2 outputs (0 slow hosts and 0 dup hosts)
2013-10-11 21:37:37,548 INFO org.apache.hadoop.mapred.ReduceTask: attempt_201310110032_0023_r_000000_0 Scheduled 2 outputs (0 slow hosts and 0 dup hosts)
2013-10-11 21:37:38,399 INFO org.apache.hadoop.mapred.ReduceTask: GetMapEventsThread exiting
2013-10-11 21:37:38,399 INFO org.apache.hadoop.mapred.ReduceTask: getMapsEventsThread joined.
2013-10-11 21:37:38,400 INFO org.apache.hadoop.mapred.ReduceTask: Closed ram manager
2013-10-11 21:37:38,400 INFO org.apache.hadoop.mapred.ReduceTask: Interleaved on-disk merge complete: 0 files left.
2013-10-11 21:37:38,400 INFO org.apache.hadoop.mapred.ReduceTask: In-memory merge complete: 9 files left.
2013-10-11 21:37:38,415 INFO org.apache.hadoop.mapred.Merger: Merging 9 sorted segments
2013-10-11 21:37:38,416 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 9 segments left of total size: 4549 bytes
2013-10-11 21:37:38,418 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor
2013-10-11 21:37:38,427 INFO org.apache.hadoop.mapred.ReduceTask: Merged 9 segments, 4549 bytes to disk to satisfy reduce memory limit
2013-10-11 21:37:38,427 INFO org.apache.hadoop.mapred.ReduceTask: Merging 1 files, 1985 bytes from disk
2013-10-11 21:37:38,428 INFO org.apache.hadoop.mapred.ReduceTask: Merging 0 segments, 0 bytes from memory into reduce
2013-10-11 21:37:38,428 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
2013-10-11 21:37:38,435 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4533 bytes
2013-10-11 21:37:38,445 INFO ExecReducer: maximum memory = 652476416
2013-10-11 21:37:38,445 INFO ExecReducer: conf classpath = [file:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/classes, file:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/, file:/grid/0/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/]
2013-10-11 21:37:38,445 INFO ExecReducer: thread classpath = [file:/etc/hadoop/conf.empty/, file:/usr/jdk64/jdk1.6.0_31/lib/tools.jar, file:/usr/lib/hadoop/, file:/usr/lib/hadoop/hadoop-core-1.2.0.1.3.2.0-111.jar, file:/usr/lib/hadoop/lib/asm-3.2.jar, file:/usr/lib/hadoop/lib/aspectjrt-1.6.11.jar, file:/usr/lib/hadoop/lib/aspectjtools-1.6.11.jar, file:/usr/lib/hadoop/lib/commons-beanutils-1.7.0.jar, file:/usr/lib/hadoop/lib/commons-beanutils-core-1.8.0.jar, file:/usr/lib/hadoop/lib/commons-cli-1.2.jar, file:/usr/lib/hadoop/lib/commons-codec-1.4.jar, file:/usr/lib/hadoop/lib/commons-collections-3.2.1.jar, file:/usr/lib/hadoop/lib/commons-configuration-1.6.jar, file:/usr/lib/hadoop/lib/commons-daemon-1.0.1.jar, file:/usr/lib/hadoop/lib/commons-digester-1.8.jar, file:/usr/lib/hadoop/lib/commons-el-1.0.jar, file:/usr/lib/hadoop/lib/commons-httpclient-3.0.1.jar, file:/usr/lib/hadoop/lib/commons-io-2.1.jar, file:/usr/lib/hadoop/lib/commons-lang-2.4.jar, file:/usr/lib/hadoop/lib/commons-logging-1.1.1.jar, file:/usr/lib/hadoop/lib/commons-logging-api-1.0.4.jar, file:/usr/lib/hadoop/lib/commons-math-2.1.jar, file:/usr/lib/hadoop/lib/commons-net-3.1.jar, file:/usr/lib/hadoop/lib/core-3.1.1.jar, file:/usr/lib/hadoop/lib/guava-11.0.2.jar, file:/usr/lib/hadoop/lib/hadoop-capacity-scheduler-1.2.0.1.3.2.0-111.jar, file:/usr/lib/hadoop/lib/hadoop-fairscheduler-1.2.0.1.3.2.0-111.jar, file:/usr/lib/hadoop/lib/hadoop-lzo-0.5.0.jar, file:/usr/lib/hadoop/lib/hadoop-thriftfs-1.2.0.1.3.2.0-111.jar, file:/usr/lib/hadoop/lib/hsqldb-1.8.0.10.jar, file:/usr/lib/hadoop/lib/jackson-core-asl-1.8.8.jar, file:/usr/lib/hadoop/lib/jackson-mapper-asl-1.8.8.jar, file:/usr/lib/hadoop/lib/jasper-compiler-5.5.12.jar, file:/usr/lib/hadoop/lib/jasper-runtime-5.5.12.jar, file:/usr/lib/hadoop/lib/jdeb-0.8.jar, file:/usr/lib/hadoop/lib/jersey-core-1.8.jar, file:/usr/lib/hadoop/lib/jersey-json-1.8.jar, file:/usr/lib/hadoop/lib/jersey-server-1.8.jar, file:/usr/lib/hadoop/lib/jets3t-0.6.1.jar, 
file:/usr/lib/hadoop/lib/jetty-6.1.26.jar, file:/usr/lib/hadoop/lib/jetty-util-6.1.26.jar, file:/usr/lib/hadoop/lib/jsch-0.1.42.jar, file:/usr/lib/hadoop/lib/junit-4.5.jar, file:/usr/lib/hadoop/lib/kfs-0.2.2.jar, file:/usr/lib/hadoop/lib/log4j-1.2.15.jar, file:/usr/lib/hadoop/lib/mockito-all-1.8.5.jar, file:/usr/lib/hadoop/lib/netty-3.6.2.Final.jar, file:/usr/lib/hadoop/lib/oro-2.0.8.jar, file:/usr/lib/hadoop/lib/servlet-api-2.5-20081211.jar, file:/usr/lib/hadoop/lib/slf4j-api-1.4.3.jar, file:/usr/lib/hadoop/lib/slf4j-log4j12-1.4.3.jar, file:/usr/lib/hadoop/lib/xmlenc-0.52.jar, file:/usr/lib/hadoop/lib/jsp-2.1/jsp-2.1.jar, file:/usr/lib/hadoop/lib/jsp-2.1/jsp-api-2.1.jar, file:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/classes, file:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/, file:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/distcache/8475069732773479592_-1538157662_695434820/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hbase-site.xml/, file:/grid/0/hdp/mapred/local/taskTracker/hrt_qa/distcache/-5549176264816609739_223569749_695434869/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hive-hbase-handler-0.11.0.1.3.2.0-111.jar/, file:/grid/1/hdp/mapred/local/taskTracker/hrt_qa/distcache/3163893831459437436_507105717_695434978/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hbase-0.94.6.1.3.2.0-111-security.jar/, file:/grid/2/hdp/mapred/local/taskTracker/hrt_qa/distcache/1522721733283368288_-399969024_695435122/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/zookeeper-3.4.5.1.3.2.0-111.jar/, file:/grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/]
2013-10-11 21:37:38,641 INFO ExecReducer: 
<EX>Id =6
  <Children>
    <LIM>Id =7
      <Children>
        <FS>Id =8
          <Parent>Id = 7 null</Parent>
        </FS>
      </Children>
      <Parent>Id = 6 null</Parent>
    </LIM>
  </Children>
</EX>
2013-10-11 21:37:38,641 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: Initializing Self 6 EX
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: Operator 6 EX initialized
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: Initializing children of 6 EX
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: Initializing child 7 LIM
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: Initializing Self 7 LIM
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: Operator 7 LIM initialized
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: Initializing children of 7 LIM
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: Initializing child 8 FS
2013-10-11 21:37:38,645 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: Initializing Self 8 FS
2013-10-11 21:37:38,671 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: Operator 8 FS initialized
2013-10-11 21:37:38,671 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: Initialization Done 8 FS
2013-10-11 21:37:38,671 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: Initialization Done 7 LIM
2013-10-11 21:37:38,671 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: Initialization Done 6 EX
2013-10-11 21:37:38,677 INFO ExecReducer: ExecReducer: processing 1 rows: used memory = 101204000
2013-10-11 21:37:38,677 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: 6 forwarding 1 rows
2013-10-11 21:37:38,677 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: 7 forwarding 1 rows
2013-10-11 21:37:38,677 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: New Final Path: FS /apps/hive/warehouse/_tmp.pagecounts_hbase/000000_0
2013-10-11 21:37:38,881 INFO org.apache.zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.5-111--1, built on 08/20/2013 01:42 GMT
2013-10-11 21:37:38,881 INFO org.apache.zookeeper.ZooKeeper: Client environment:host.name=hor17n36.gq1.ygridcore.net
2013-10-11 21:37:38,881 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.version=1.6.0_31
2013-10-11 21:37:38,881 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.vendor=Sun Microsystems Inc.
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.home=/usr/jdk64/jdk1.6.0_31/jre
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.class.path=/etc/hadoop/conf:/usr/hadoop-jdk1.6.0_31/lib/tools.jar:/usr/lib/hadoop/libexec/..:/usr/lib/hadoop/libexec/../hadoop-core-1.2.0.1.3.2.0-111.jar:/usr/lib/hadoop/libexec/../lib/asm-3.2.jar:/usr/lib/hadoop/libexec/../lib/aspectjrt-1.6.11.jar:/usr/lib/hadoop/libexec/../lib/aspectjtools-1.6.11.jar:/usr/lib/hadoop/libexec/../lib/commons-beanutils-1.7.0.jar:/usr/lib/hadoop/libexec/../lib/commons-beanutils-core-1.8.0.jar:/usr/lib/hadoop/libexec/../lib/commons-cli-1.2.jar:/usr/lib/hadoop/libexec/../lib/commons-codec-1.4.jar:/usr/lib/hadoop/libexec/../lib/commons-collections-3.2.1.jar:/usr/lib/hadoop/libexec/../lib/commons-configuration-1.6.jar:/usr/lib/hadoop/libexec/../lib/commons-daemon-1.0.1.jar:/usr/lib/hadoop/libexec/../lib/commons-digester-1.8.jar:/usr/lib/hadoop/libexec/../lib/commons-el-1.0.jar:/usr/lib/hadoop/libexec/../lib/commons-httpclient-3.0.1.jar:/usr/lib/hadoop/libexec/../lib/commons-io-2.1.jar:/usr/lib/hadoop/libexec/../lib/commons-lang-2.4.jar:/usr/lib/hadoop/libexec/../lib/commons-logging-1.1.1.jar:/usr/lib/hadoop/libexec/../lib/commons-logging-api-1.0.4.jar:/usr/lib/hadoop/libexec/../lib/commons-math-2.1.jar:/usr/lib/hadoop/libexec/../lib/commons-net-3.1.jar:/usr/lib/hadoop/libexec/../lib/core-3.1.1.jar:/usr/lib/hadoop/libexec/../lib/guava-11.0.2.jar:/usr/lib/hadoop/libexec/../lib/hadoop-capacity-scheduler-1.2.0.1.3.2.0-111.jar:/usr/lib/hadoop/libexec/../lib/hadoop-fairscheduler-1.2.0.1.3.2.0-111.jar:/usr/lib/hadoop/libexec/../lib/hadoop-lzo-0.5.0.jar:/usr/lib/hadoop/libexec/../lib/hadoop-thriftfs-1.2.0.1.3.2.0-111.jar:/usr/lib/hadoop/libexec/../lib/hsqldb-1.8.0.10.jar:/usr/lib/hadoop/libexec/../lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop/libexec/../lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop/libexec/../lib/jasper-compiler-5.5.12.jar:/usr/lib/hadoop/libexec/../lib/jasper-runtime-5.5.12.jar:/usr/lib/hadoop/libexec/../lib/jdeb-0.8.jar:/usr/lib/hadoop/libex
ec/../lib/jersey-core-1.8.jar:/usr/lib/hadoop/libexec/../lib/jersey-json-1.8.jar:/usr/lib/hadoop/libexec/../lib/jersey-server-1.8.jar:/usr/lib/hadoop/libexec/../lib/jets3t-0.6.1.jar:/usr/lib/hadoop/libexec/../lib/jetty-6.1.26.jar:/usr/lib/hadoop/libexec/../lib/jetty-util-6.1.26.jar:/usr/lib/hadoop/libexec/../lib/jsch-0.1.42.jar:/usr/lib/hadoop/libexec/../lib/junit-4.5.jar:/usr/lib/hadoop/libexec/../lib/kfs-0.2.2.jar:/usr/lib/hadoop/libexec/../lib/log4j-1.2.15.jar:/usr/lib/hadoop/libexec/../lib/mockito-all-1.8.5.jar:/usr/lib/hadoop/libexec/../lib/netty-3.6.2.Final.jar:/usr/lib/hadoop/libexec/../lib/oro-2.0.8.jar:/usr/lib/hadoop/libexec/../lib/servlet-api-2.5-20081211.jar:/usr/lib/hadoop/libexec/../lib/slf4j-api-1.4.3.jar:/usr/lib/hadoop/libexec/../lib/slf4j-log4j12-1.4.3.jar:/usr/lib/hadoop/libexec/../lib/xmlenc-0.52.jar:/usr/lib/hadoop/libexec/../lib/jsp-2.1/jsp-2.1.jar:/usr/lib/hadoop/libexec/../lib/jsp-2.1/jsp-api-2.1.jar:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars/classes:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/jars:/grid/5/hdp/mapred/local/taskTracker/hrt_qa/distcache/8475069732773479592_-1538157662_695434820/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hbase-site.xml:/grid/0/hdp/mapred/local/taskTracker/hrt_qa/distcache/-5549176264816609739_223569749_695434869/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hive-hbase-handler-0.11.0.1.3.2.0-111.jar:/grid/1/hdp/mapred/local/taskTracker/hrt_qa/distcache/3163893831459437436_507105717_695434978/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/hbase-0.94.6.1.3.2.0-111-security.jar:/grid/2/hdp/mapred/local/taskTracker/hrt_qa/distcache/1522721733283368288_-399969024_695435122/hor18n34.gq1.ygridcore.net/user/hrt_qa/.staging/job_201310110032_0023/libjars/zookeeper-3.4.5.1.3.2.0-111.jar:/grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/
attempt_201310110032_0023_r_000000_0/work
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.library.path=/usr/lib/hadoop/libexec/../lib/native/Linux-amd64-64:/grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.io.tmpdir=/grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work/tmp
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:os.name=Linux
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:os.arch=amd64
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:os.version=2.6.32-220.23.1.el6.YAHOO.20120713.x86_64
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:user.name=hrt_qa
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:user.home=/homes/hrt_qa
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Client environment:user.dir=/grid/1/hdp/mapred/local/taskTracker/hrt_qa/jobcache/job_201310110032_0023/attempt_201310110032_0023_r_000000_0/work
2013-10-11 21:37:38,882 INFO org.apache.zookeeper.ZooKeeper: Initiating client connection, connectString=hor17n34.gq1.ygridcore.net:2181 sessionTimeout=60000 watcher=hconnection
2013-10-11 21:37:38,898 INFO org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper: The identifier of this process is 5359@hor17n36.gq1.ygridcore.net
2013-10-11 21:37:38,904 INFO org.apache.zookeeper.ClientCnxn: Opening socket connection to server hor17n34.gq1.ygridcore.net/206.190.52.53:2181. Will not attempt to authenticate using SASL (Unable to locate a login configuration)
2013-10-11 21:37:38,905 INFO org.apache.zookeeper.ClientCnxn: Socket connection established to hor17n34.gq1.ygridcore.net/206.190.52.53:2181, initiating session
2013-10-11 21:37:38,937 INFO org.apache.zookeeper.ClientCnxn: Session establishment complete on server hor17n34.gq1.ygridcore.net/206.190.52.53:2181, sessionid = 0x141a4ef4d8b001d, negotiated timeout = 40000
2013-10-11 21:37:39,021 ERROR org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:hrt_qa cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
2013-10-11 21:37:39,021 WARN org.apache.hadoop.ipc.SecureClient: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
2013-10-11 21:37:39,022 FATAL org.apache.hadoop.ipc.SecureClient: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:571)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.LimitOperator.processOp(LimitOperator.java:51)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.ExtractOperator.processOp(ExtractOperator.java:45)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:253)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:522)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 54 more
2013-10-11 21:37:39,027 FATAL ExecReducer: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":"en/q:Pan%27s_Labyrinth/20081001-080000","_col1":"1","_col2":"1153"},"alias":0}
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:262)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:522)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:240)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:571)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.LimitOperator.processOp(LimitOperator.java:51)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.ExtractOperator.processOp(ExtractOperator.java:45)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:253)
	... 7 more
Caused by: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$1.run(SecureClient.java:263)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.handleSaslConnectionFailure(SecureClient.java:224)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:313)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	... 17 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	... 37 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 54 more

2013-10-11 21:37:39,027 INFO ExecReducer: ExecReducer: processed 1 rows: used memory = 23485688
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: 6 finished. closing... 
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.ExtractOperator: 6 forwarded 1 rows
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: 7 finished. closing... 
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.LimitOperator: 7 forwarded 1 rows
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: 8 finished. closing... 
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: 8 forwarded 0 rows
2013-10-11 21:37:39,027 INFO org.apache.hadoop.hive.ql.exec.FileSinkOperator: New Final Path: FS /apps/hive/warehouse/_tmp.pagecounts_hbase/000000_0
2013-10-11 21:37:39,048 ERROR org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:hrt_qa cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
2013-10-11 21:37:39,048 WARN org.apache.hadoop.ipc.SecureClient: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
2013-10-11 21:37:39,048 FATAL org.apache.hadoop.ipc.SecureClient: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.closeOp(FileSinkOperator.java:859)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:588)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.Operator.close(Operator.java:597)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.close(ExecReducer.java:309)
	at org.apache.hadoop.io.IOUtils.cleanup(IOUtils.java:232)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:539)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 51 more
2013-10-11 21:37:39,066 INFO org.apache.hadoop.mapred.TaskLogsTruncater: Initializing logs' truncater with mapRetainSize=-1 and reduceRetainSize=-1
2013-10-11 21:37:39,086 INFO org.apache.hadoop.io.nativeio.NativeIO: Initialized cache for UID to User mapping with a cache timeout of 14400 seconds.
2013-10-11 21:37:39,087 INFO org.apache.hadoop.io.nativeio.NativeIO: Got UserName hrt_qa for UID 67992 from the native implementation
2013-10-11 21:37:39,088 WARN org.apache.hadoop.mapred.Child: Error running child
java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":"en/q:Pan%27s_Labyrinth/20081001-080000","_col1":"1","_col2":"1153"},"alias":0}
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:274)
	at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:522)
	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":"en/q:Pan%27s_Labyrinth/20081001-080000","_col1":"1","_col2":"1153"},"alias":0}
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:262)
	... 7 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:240)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:515)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:571)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.LimitOperator.processOp(LimitOperator.java:51)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:832)
	at org.apache.hadoop.hive.ql.exec.ExtractOperator.processOp(ExtractOperator.java:45)
	at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:502)
	at org.apache.hadoop.hive.ql.exec.ExecReducer.reduce(ExecReducer.java:253)
	... 7 more
Caused by: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$1.run(SecureClient.java:263)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.handleSaslConnectionFailure(SecureClient.java:224)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:313)
	at org.apache.hadoop.hbase.ipc.HBaseClient.getConnection(HBaseClient.java:1124)
	at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:974)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine$Invoker.invoke(SecureRpcEngine.java:104)
	at $Proxy10.getProtocolVersion(Unknown Source)
	at org.apache.hadoop.hbase.ipc.SecureRpcEngine.getProxy(SecureRpcEngine.java:146)
	at org.apache.hadoop.hbase.ipc.HBaseRPC.waitForProxy(HBaseRPC.java:208)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1346)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1305)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHRegionConnection(HConnectionManager.java:1292)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:1001)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:896)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegionInMeta(HConnectionManager.java:998)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:900)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:857)
	at org.apache.hadoop.hbase.client.HTable.finishSetup(HTable.java:234)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:174)
	at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:133)
	at org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat.getHiveRecordWriter(HiveHBaseTableOutputFormat.java:83)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:250)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:237)
	... 17 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:194)
	at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:141)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupSaslConnection(SecureClient.java:198)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.access$600(SecureClient.java:96)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:306)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection$2.run(SecureClient.java:303)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.util.Methods.call(Methods.java:37)
	at org.apache.hadoop.hbase.security.User.call(User.java:590)
	at org.apache.hadoop.hbase.security.User.access$700(User.java:51)
	at org.apache.hadoop.hbase.security.User$SecureHadoopUser.runAs(User.java:444)
	at org.apache.hadoop.hbase.ipc.SecureClient$SecureConnection.setupIOstreams(SecureClient.java:302)
	... 37 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
	at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:130)
	at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:106)
	at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:172)
	at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:209)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:195)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:162)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:175)
	... 54 more
2013-10-11 21:37:39,091 INFO org.apache.hadoop.mapred.Task: Runnning cleanup for the task