20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 317504 vcore_seconds: 70 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
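The getApplicationReport / getApplicationAttemptReport / getContainerReport triplet above repeats every five seconds: it is the Livy server polling the YARN ResourceManager (ip-10-85-100-167.vpc.internal:8032) for application_1604078122294_0004, which YARN still reports as RUNNING even after the session has failed. A minimal sketch of an equivalent poll, using Hadoop's public YarnClient API; the class name AppStatePoller and the hard-coded interval are illustrative, not taken from Livy's source:

// Minimal sketch, not Livy's actual poller: query the RM for the same
// application the log above is tracking, via Hadoop's public YarnClient API.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;

public class AppStatePoller {                        // hypothetical class name
    public static void main(String[] args) throws Exception {
        YarnClient yarn = YarnClient.createYarnClient();
        yarn.init(new Configuration());              // reads yarn-site.xml; the RM is at :8032 in the log
        yarn.start();
        // application_1604078122294_0004 = cluster timestamp 1604078122294, id 4
        ApplicationId appId = ApplicationId.newInstance(1604078122294L, 4);
        while (true) {
            ApplicationReport r = yarn.getApplicationReport(appId);
            System.out.printf("%s progress=%.2f diagnostics=%s%n",
                    r.getYarnApplicationState(), r.getProgress(), r.getDiagnostics());
            Thread.sleep(5000);                      // the log shows a 5-second cadence
        }
    }
}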
20/10/30 21:43:51 ERROR SessionServlet$: internal error
java.util.concurrent.ExecutionException: javax.security.sasl.SaslException: Client closed before SASL negotiation finished.
	at io.netty.util.concurrent.AbstractFuture.get(AbstractFuture.java:41)
	at org.apache.livy.server.interactive.InteractiveSession.statements(InteractiveSession.scala:490)
	at org.apache.livy.server.interactive.InteractiveSessionServlet$$anonfun$8$$anonfun$apply$3.apply(InteractiveSessionServlet.scala:107)
	at org.apache.livy.server.interactive.InteractiveSessionServlet$$anonfun$8$$anonfun$apply$3.apply(InteractiveSessionServlet.scala:106)
	at org.apache.livy.server.interactive.SessionHeartbeatNotifier$$anonfun$withViewAccessSession$1.apply(SessionHeartbeat.scala:69)
	at org.apache.livy.server.interactive.SessionHeartbeatNotifier$$anonfun$withViewAccessSession$1.apply(SessionHeartbeat.scala:67)
	at org.apache.livy.server.SessionServlet.doWithSession(SessionServlet.scala:226)
	at org.apache.livy.server.SessionServlet.withViewAccessSession(SessionServlet.scala:199)
	at org.apache.livy.server.interactive.InteractiveSessionServlet.org$apache$livy$server$interactive$SessionHeartbeatNotifier$$super$withViewAccessSession(InteractiveSessionServlet.scala:40)
	at org.apache.livy.server.interactive.SessionHeartbeatNotifier$class.withViewAccessSession(SessionHeartbeat.scala:67)
	at org.apache.livy.server.interactive.InteractiveSessionServlet.withViewAccessSession(InteractiveSessionServlet.scala:40)
	at org.apache.livy.server.interactive.InteractiveSessionServlet$$anonfun$8.apply(InteractiveSessionServlet.scala:106)
	at org.scalatra.ScalatraBase$class.org$scalatra$ScalatraBase$$liftAction(ScalatraBase.scala:270)
	at org.scalatra.ScalatraBase$$anonfun$invoke$1.apply(ScalatraBase.scala:265)
	at org.scalatra.ScalatraBase$$anonfun$invoke$1.apply(ScalatraBase.scala:265)
	at org.scalatra.ApiFormats$class.withRouteMultiParams(ApiFormats.scala:178)
	at org.apache.livy.server.JsonServlet.withRouteMultiParams(JsonServlet.scala:39)
	at org.scalatra.ScalatraBase$class.invoke(ScalatraBase.scala:264)
	at org.scalatra.ScalatraServlet.invoke(ScalatraServlet.scala:49)
	at org.scalatra.ScalatraBase$$anonfun$runRoutes$1$$anonfun$apply$8.apply(ScalatraBase.scala:240)
	at org.scalatra.ScalatraBase$$anonfun$runRoutes$1$$anonfun$apply$8.apply(ScalatraBase.scala:238)
	at scala.Option.flatMap(Option.scala:171)
	at org.scalatra.ScalatraBase$$anonfun$runRoutes$1.apply(ScalatraBase.scala:238)
	at org.scalatra.ScalatraBase$$anonfun$runRoutes$1.apply(ScalatraBase.scala:237)
	at scala.collection.immutable.Stream.flatMap(Stream.scala:493)
	at org.scalatra.ScalatraBase$class.runRoutes(ScalatraBase.scala:237)
	at org.scalatra.ScalatraServlet.runRoutes(ScalatraServlet.scala:49)
	at org.scalatra.ScalatraBase$class.runActions$1(ScalatraBase.scala:163)
	at org.scalatra.ScalatraBase$$anonfun$executeRoutes$1.apply$mcV$sp(ScalatraBase.scala:175)
	at org.scalatra.ScalatraBase$$anonfun$executeRoutes$1.apply(ScalatraBase.scala:175)
	at org.scalatra.ScalatraBase$$anonfun$executeRoutes$1.apply(ScalatraBase.scala:175)
	at org.scalatra.ScalatraBase$class.org$scalatra$ScalatraBase$$cradleHalt(ScalatraBase.scala:193)
	at org.scalatra.ScalatraBase$class.executeRoutes(ScalatraBase.scala:175)
	at org.scalatra.ScalatraServlet.executeRoutes(ScalatraServlet.scala:49)
	at org.scalatra.ScalatraBase$$anonfun$handle$1.apply$mcV$sp(ScalatraBase.scala:113)
	at org.scalatra.ScalatraBase$$anonfun$handle$1.apply(ScalatraBase.scala:113)
	at org.scalatra.ScalatraBase$$anonfun$handle$1.apply(ScalatraBase.scala:113)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
	at org.scalatra.DynamicScope$class.withResponse(DynamicScope.scala:80)
	at org.scalatra.ScalatraServlet.withResponse(ScalatraServlet.scala:49)
	at org.scalatra.DynamicScope$$anonfun$withRequestResponse$1.apply(DynamicScope.scala:60)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
	at org.scalatra.DynamicScope$class.withRequest(DynamicScope.scala:71)
	at org.scalatra.ScalatraServlet.withRequest(ScalatraServlet.scala:49)
	at org.scalatra.DynamicScope$class.withRequestResponse(DynamicScope.scala:59)
	at org.scalatra.ScalatraServlet.withRequestResponse(ScalatraServlet.scala:49)
	at org.scalatra.ScalatraBase$class.handle(ScalatraBase.scala:111)
	at org.scalatra.ScalatraServlet.org$scalatra$servlet$ServletBase$$super$handle(ScalatraServlet.scala:49)
	at org.scalatra.servlet.ServletBase$class.handle(ServletBase.scala:43)
	at org.apache.livy.server.SessionServlet.org$scalatra$MethodOverride$$super$handle(SessionServlet.scala:42)
	at org.scalatra.MethodOverride$class.handle(MethodOverride.scala:28)
	at org.apache.livy.server.SessionServlet.org$scalatra$GZipSupport$$super$handle(SessionServlet.scala:42)
	at org.scalatra.GZipSupport$$anonfun$handle$1.apply$mcV$sp(GZipSupport.scala:34)
	at org.scalatra.GZipSupport$$anonfun$handle$1.apply(GZipSupport.scala:19)
	at org.scalatra.GZipSupport$$anonfun$handle$1.apply(GZipSupport.scala:19)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
	at org.scalatra.DynamicScope$class.withResponse(DynamicScope.scala:80)
	at org.scalatra.ScalatraServlet.withResponse(ScalatraServlet.scala:49)
	at org.scalatra.DynamicScope$$anonfun$withRequestResponse$1.apply(DynamicScope.scala:60)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
	at org.scalatra.DynamicScope$class.withRequest(DynamicScope.scala:71)
	at org.scalatra.ScalatraServlet.withRequest(ScalatraServlet.scala:49)
	at org.scalatra.DynamicScope$class.withRequestResponse(DynamicScope.scala:59)
	at org.scalatra.ScalatraServlet.withRequestResponse(ScalatraServlet.scala:49)
	at org.scalatra.GZipSupport$class.handle(GZipSupport.scala:18)
	at org.apache.livy.server.interactive.InteractiveSessionServlet.org$scalatra$servlet$FileUploadSupport$$super$handle(InteractiveSessionServlet.scala:40)
	at org.scalatra.servlet.FileUploadSupport$class.handle(FileUploadSupport.scala:93)
	at org.apache.livy.server.interactive.InteractiveSessionServlet.handle(InteractiveSessionServlet.scala:40)
	at org.scalatra.ScalatraServlet.service(ScalatraServlet.scala:54)
	at javax.servlet.http.HttpServlet.service(HttpServlet.java:790)
	at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:840)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:584)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1180)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:512)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1112)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
	at org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:119)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:134)
	at org.eclipse.jetty.server.Server.handle(Server.java:539)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:333)
	at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:251)
	at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:283)
	at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:108)
	at org.eclipse.jetty.io.SelectChannelEndPoint$2.run(SelectChannelEndPoint.java:93)
	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.executeProduceConsume(ExecuteProduceConsume.java:303)
	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.produceConsume(ExecuteProduceConsume.java:148)
	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.run(ExecuteProduceConsume.java:136)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:671)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:589)
	at java.lang.Thread.run(Thread.java:748)
Caused by: javax.security.sasl.SaslException: Client closed before SASL negotiation finished.
	at org.apache.livy.rsc.rpc.Rpc$SaslClientHandler.dispose(Rpc.java:419)
	at org.apache.livy.rsc.rpc.SaslHandler.channelInactive(SaslHandler.java:92)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInputClosed(ByteToMessageDecoder.java:360)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInactive(ByteToMessageDecoder.java:325)
	at io.netty.handler.codec.ByteToMessageCodec.channelInactive(ByteToMessageCodec.java:118)
	at org.apache.livy.rsc.rpc.KryoMessageCodec.channelInactive(KryoMessageCodec.java:104)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.handler.logging.LoggingHandler.channelInactive(LoggingHandler.java:170)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.DefaultChannelPipeline$HeadContext.channelInactive(DefaultChannelPipeline.java:1275)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.DefaultChannelPipeline.fireChannelInactive(DefaultChannelPipeline.java:872)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$7.run(AbstractChannel.java:679)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:358)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:394)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:112)
	... 1 more
20/10/30 21:43:51 INFO RSCClient: Failing pending job 20a80b05-0b63-42be-8678-bf8dbed969e9 due to shutdown.
20/10/30 21:43:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 ! R:/10.139.22.111:11000] UNREGISTERED
20/10/30 21:43:51 ERROR RSCClient: RPC error.
javax.security.sasl.SaslException: Client closed before SASL negotiation finished.
	at org.apache.livy.rsc.rpc.Rpc$SaslClientHandler.dispose(Rpc.java:419)
	at org.apache.livy.rsc.rpc.SaslHandler.channelInactive(SaslHandler.java:92)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInputClosed(ByteToMessageDecoder.java:360)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInactive(ByteToMessageDecoder.java:325)
	at io.netty.handler.codec.ByteToMessageCodec.channelInactive(ByteToMessageCodec.java:118)
	at org.apache.livy.rsc.rpc.KryoMessageCodec.channelInactive(KryoMessageCodec.java:104)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.handler.logging.LoggingHandler.channelInactive(LoggingHandler.java:170)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.DefaultChannelPipeline$HeadContext.channelInactive(DefaultChannelPipeline.java:1275)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.DefaultChannelPipeline.fireChannelInactive(DefaultChannelPipeline.java:872)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$7.run(AbstractChannel.java:679)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:358)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:394)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:112)
	at java.lang.Thread.run(Thread.java:748)
20/10/30 21:43:51 ERROR RSCClient: Failed to connect to context.
javax.security.sasl.SaslException: Client closed before SASL negotiation finished.
	at org.apache.livy.rsc.rpc.Rpc$SaslClientHandler.dispose(Rpc.java:419)
	at org.apache.livy.rsc.rpc.SaslHandler.channelInactive(SaslHandler.java:92)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInputClosed(ByteToMessageDecoder.java:360)
	at io.netty.handler.codec.ByteToMessageDecoder.channelInactive(ByteToMessageDecoder.java:325)
	at io.netty.handler.codec.ByteToMessageCodec.channelInactive(ByteToMessageCodec.java:118)
	at org.apache.livy.rsc.rpc.KryoMessageCodec.channelInactive(KryoMessageCodec.java:104)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.handler.logging.LoggingHandler.channelInactive(LoggingHandler.java:170)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelInactive(ChannelInboundHandlerAdapter.java:75)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelInactive(AbstractChannelHandlerContext.java:212)
	at io.netty.channel.DefaultChannelPipeline$HeadContext.channelInactive(DefaultChannelPipeline.java:1275)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:233)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelInactive(AbstractChannelHandlerContext.java:219)
	at io.netty.channel.DefaultChannelPipeline.fireChannelInactive(DefaultChannelPipeline.java:872)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$7.run(AbstractChannel.java:679)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:358)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:394)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:112)
	at java.lang.Thread.run(Thread.java:748)
20/10/30 21:43:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 ! R:/10.139.22.111:11000] CLOSE()
20/10/30 21:43:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 ! R:/10.139.22.111:11000] INACTIVE
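The section above is the actual failure: the RSC channel from the Livy server (100.106.192.139:43568) to the remote driver RPC endpoint (10.139.22.111:11000) went inactive before the SASL handshake completed, Rpc$SaslClientHandler.dispose failed the negotiation with the SaslException, and the statements() call blocked on that connection surfaced it as the ExecutionException behind "internal error". A sketch of this failure mode under stated assumptions; HandshakeWatchdog and its promise wiring are hypothetical illustrations, not Livy's actual SaslHandler:

// Illustrative Netty handler, not Livy's SaslHandler: fail a pending
// handshake promise when the channel goes inactive mid-negotiation.
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.concurrent.Promise;
import javax.security.sasl.SaslException;

class HandshakeWatchdog extends ChannelInboundHandlerAdapter { // hypothetical name
    private final Promise<Void> handshakeDone;      // assumed handshake future

    HandshakeWatchdog(Promise<Void> handshakeDone) {
        this.handshakeDone = handshakeDone;
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        // If the peer closes before negotiation completes, every caller
        // waiting on the handshake future sees this SaslException; that is
        // what the ExecutionException in the servlet trace above wraps.
        handshakeDone.tryFailure(
                new SaslException("Client closed before SASL negotiation finished."));
        super.channelInactive(ctx);
    }
}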
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 294913 vcore_seconds: 65 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 272321 vcore_seconds: 60 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:46 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 248006 vcore_seconds: 54 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 3ms
20/10/30 21:43:41 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 222513 vcore_seconds: 49 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:36 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 1ms
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 204547 vcore_seconds: 45 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:31 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: Call: getContainerReport took 1ms
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 1ms
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 181955 vcore_seconds: 40 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:26 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: Call: getContainerReport took 1ms
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 159359 vcore_seconds: 35 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:21 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: Call: getContainerReport took 1ms
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 1ms
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 136763 vcore_seconds: 30 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:16 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: Call: getContainerReport took 1ms
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 1ms
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 114176 vcore_seconds: 25 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:11 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 91580 vcore_seconds: 20 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 3ms
20/10/30 21:43:06 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 68988 vcore_seconds: 15 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms
20/10/30 21:43:01 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: Call: getContainerReport took 1ms
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" yarn_application_attempt_state: APP_ATTEMPT_RUNNING am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "http://ip-10-85-100-91.vpc.internal:42481" 9: 1604094166408 10: 0 }}
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:42:56 DEBUG InteractiveSession: InteractiveSession 0 app state changed from STARTING to RUNNING
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "ip-10-85-100-91.vpc.internal" rpc_port: 39989 yarn_application_state: RUNNING trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 46374 vcore_seconds: 10 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "http://ip-10-85-100-91.vpc.internal:42481" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.1 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 3ms
20/10/30 21:42:56 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_report { container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } resource { memory: 4512 virtual_cores: 1 } node_id { host: "ip-10-85-100-91.vpc.internal" port: 8041 } priority { priority: 0 } creation_time: 1604094166529 finish_time: 0 log_url: "http://ip-10-85-100-91.vpc.internal:8042/node/containerlogs/container_1604078122294_0004_01_000001/hadoop" container_exit_status: 0 container_state: C_RUNNING node_http_address: "http://ip-10-85-100-91.vpc.internal:8042" }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: Call: getContainerReport took 2ms
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getContainerReport {container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_report { application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } host: "N/A" rpc_port: -1 tracking_url: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "AM container is launched, waiting for AM container to Register with RM" yarn_application_attempt_state: APP_ATTEMPT_LAUNCHED am_container_id { app_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } id: 1 } original_tracking_url: "N/A" 9: 1604094166408 10: 0 }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: Call: getApplicationAttemptReport took 2ms
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationAttemptReport {application_attempt_id { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_report { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "N/A" rpc_port: -1 yarn_application_state: ACCEPTED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "AM container is launched, waiting for AM container to Register with RM" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 23620 vcore_seconds: 5 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "N/A" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.0 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 3ms
20/10/30 21:42:51 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplicationReport {application_id { id: 4 cluster_timestamp: 1604078122294 }}
20/10/30 21:42:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 - R:/10.139.22.111:11000] ACTIVE
20/10/30 21:42:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 - R:/10.139.22.111:11000] FLUSH
20/10/30 21:42:51 DEBUG Rpc: [id: 0xf7eada9d, L:/100.106.192.139:43568 - R:/10.139.22.111:11000] WRITE: 45B
         +-------------------------------------------------+
         |  0  1  2  3  4  5  6  7  8  9  a  b  c  d  e  f |
+--------+-------------------------------------------------+----------------+
|00000000| 00 00 00 29 14 01 01 65 65 66 62 36 35 36 64 2d |...)...eefb656d-|
|00000010| 64 64 36 31 2d 34 32 32 34 2d 62 39 30 32 2d 39 |dd61-4224-b902-9|
|00000020| 38 61 31 31 37 37 64 34 34 32 b3 01 01          |8a1177d442...   |
+--------+-------------------------------------------------+----------------+
20/10/30 21:42:51 DEBUG KryoMessageCodec: Encoded message of type org.apache.livy.rsc.rpc.Rpc$SaslMessage (41 bytes)
20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 ! R:/10.139.22.111:57906] UNREGISTERED
20/10/30 21:42:51 DEBUG RpcDispatcher: Channel [id: 0x5e9c0940, L:/100.106.192.139:11000 ! R:/10.139.22.111:57906] became inactive.
20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 !
R:/10.139.22.111:57906] INACTIVE 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] CLOSE() 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] FLUSH +--------+-------------------------------------------------+----------------+ |00000000| 00 00 00 02 13 01 |...... | +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] WRITE: 6B 20/10/30 21:42:51 DEBUG KryoMessageCodec: Encoded message of type org.apache.livy.rsc.rpc.Rpc$NullMessage (2 bytes) +--------+-------------------------------------------------+----------------+ |00000000| 00 00 00 05 12 01 00 01 02 |......... | +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] WRITE: 9B 20/10/30 21:42:51 DEBUG KryoMessageCodec: Encoded message of type org.apache.livy.rsc.rpc.Rpc$MessageHeader (5 bytes) 20/10/30 21:42:51 DEBUG ContextLauncher: Received driver info for client [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906]: ip-10-85-100-91.vpc.internal/11000. 20/10/30 21:42:51 DEBUG RpcDispatcher: [RegistrationHandler] Received RPC message: type=CALL id=0 payload=org.apache.livy.rsc.BaseProtocol$RemoteDriverAddress 20/10/30 21:42:51 DEBUG KryoMessageCodec: Decoded message of type org.apache.livy.rsc.BaseProtocol$RemoteDriverAddress (87 bytes) 20/10/30 21:42:51 DEBUG KryoMessageCodec: Decoded message of type org.apache.livy.rsc.rpc.Rpc$MessageHeader (5 bytes) +--------+-------------------------------------------------+----------------+ |00000060| ec f0 ab 01 |.... | |00000050| 30 2d 39 31 2e 76 70 63 2e 69 6e 74 65 72 6e 61 |0-91.vpc.interna| |00000040| 65 73 f3 01 01 69 70 2d 31 30 2d 38 35 2d 31 30 |es...ip-10-85-10| |00000030| 52 65 6d 6f 74 65 44 72 69 76 65 72 41 64 64 72 |RemoteDriverAddr| |00000020| 73 63 2e 42 61 73 65 50 72 6f 74 6f 63 6f 6c 24 |sc.BaseProtocol$| |00000010| 72 67 2e 61 70 61 63 68 65 2e 6c 69 76 79 2e 72 |rg.apache.livy.r| |00000000| 00 00 00 05 12 01 00 01 01 00 00 00 57 01 00 6f |............W..o| +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] RECEIVED: 100B 20/10/30 21:42:51 DEBUG ContextLauncher: New RPC client connected from [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906]. 20/10/30 21:42:51 DEBUG RpcServer$SaslServerHandler: SASL negotiation finished with QOP auth. 
20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] FLUSH +--------+-------------------------------------------------+----------------+ |00000030| 62 |b | |00000020| 62 66 38 31 34 31 34 62 36 31 64 65 61 33 65 66 |bf81414b61dea3ef| |00000010| 3d 66 61 65 39 31 35 65 35 63 32 62 33 35 33 32 |=fae915e5c2b3532| |00000000| 00 00 00 2d 14 01 00 01 29 72 73 70 61 75 74 68 |...-....)rspauth| +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] WRITE: 49B 20/10/30 21:42:51 DEBUG KryoMessageCodec: Encoded message of type org.apache.livy.rsc.rpc.Rpc$SaslMessage (45 bytes) 20/10/30 21:42:51 DEBUG RpcServer$SaslServerHandler: Sending SASL challenge response... 20/10/30 21:42:51 DEBUG RpcServer$SaslServerHandler: Handling SASL challenge message... 20/10/30 21:42:51 DEBUG KryoMessageCodec: Decoded message of type org.apache.livy.rsc.rpc.Rpc$SaslMessage (275 bytes) +--------+-------------------------------------------------+----------------+ |00000110| 6f 70 3d 61 75 74 68 |op=auth | |00000100| 30 32 66 32 35 66 33 33 34 38 30 34 37 64 2c 71 |02f25f3348047d,q| |000000f0| 31 31 37 65 31 30 39 65 36 34 63 30 36 62 33 35 |117e109e64c06b35| |000000e0| 35 35 33 36 2c 72 65 73 70 6f 6e 73 65 3d 36 63 |5536,response=6c| |000000d0| 73 63 2f 72 73 63 22 2c 6d 61 78 62 75 66 3d 36 |sc/rsc",maxbuf=6| |000000c0| 4b 22 2c 64 69 67 65 73 74 2d 75 72 69 3d 22 72 |K",digest-uri="r| |000000b0| 4a 2f 5a 54 65 52 36 4c 4a 41 50 57 73 6c 66 4c |J/ZTeR6LJAPWslfL| |000000a0| 76 57 67 39 52 33 31 37 73 39 4e 4b 57 46 33 4b |vWg9R317s9NKWF3K| |00000090| 2c 63 6e 6f 6e 63 65 3d 22 54 70 71 67 6f 75 56 |,cnonce="TpqgouV| |00000080| 71 4b 48 22 2c 6e 63 3d 30 30 30 30 30 30 30 31 |qKH",nc=00000001| |00000070| 70 4a 2b 52 79 52 49 42 6e 47 31 33 70 42 71 4d |pJ+RyRIBnG13pBqM| |00000060| 4c 67 43 41 36 33 38 62 2b 6b 4d 32 78 37 43 79 |LgCA638b+kM2x7Cy| |00000050| 73 63 22 2c 6e 6f 6e 63 65 3d 22 66 4c 6e 58 77 |sc",nonce="fLnXw| |00000040| 37 64 34 34 32 33 22 2c 72 65 61 6c 6d 3d 22 72 |7d4423",realm="r| |00000030| 34 32 32 34 2d 62 39 30 32 2d 39 38 61 31 31 37 |4224-b902-98a117| |00000020| 3d 22 65 65 66 62 36 35 36 64 2d 64 64 36 31 2d |="eefb656d-dd61-| |00000010| 74 3d 75 74 66 2d 38 2c 75 73 65 72 6e 61 6d 65 |t=utf-8,username| |00000000| 00 00 01 13 14 01 00 01 8e 02 63 68 61 72 73 65 |..........charse| +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] RECEIVED: 279B 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] FLUSH +--------+-------------------------------------------------+----------------+ |00000060| 35 2d 73 65 73 73 |5-sess | |00000050| 66 2d 38 2c 61 6c 67 6f 72 69 74 68 6d 3d 6d 64 |f-8,algorithm=md| |00000040| 4d 71 4b 48 22 2c 63 68 61 72 73 65 74 3d 75 74 |MqKH",charset=ut| |00000030| 79 70 4a 2b 52 79 52 49 42 6e 47 31 33 70 42 71 |ypJ+RyRIBnG13pBq| |00000020| 77 4c 67 43 41 36 33 38 62 2b 6b 4d 32 78 37 43 |wLgCA638b+kM2x7C| |00000010| 72 73 63 22 2c 6e 6f 6e 63 65 3d 22 66 4c 6e 58 |rsc",nonce="fLnX| |00000000| 00 00 00 62 14 01 00 01 5e 72 65 61 6c 6d 3d 22 |...b....^realm="| 
+--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] WRITE: 102B 20/10/30 21:42:51 DEBUG KryoMessageCodec: Encoded message of type org.apache.livy.rsc.rpc.Rpc$SaslMessage (98 bytes) 20/10/30 21:42:51 DEBUG RpcServer$SaslServerHandler: Sending SASL challenge response... 20/10/30 21:42:51 DEBUG RpcServer$SaslServerHandler: Handling SASL challenge message... 20/10/30 21:42:51 DEBUG KryoMessageCodec: Decoded message of type org.apache.livy.rsc.rpc.Rpc$SaslMessage (41 bytes) 20/10/30 21:42:51 DEBUG Recycler: -Dio.netty.recycler.linkCapacity: 16 20/10/30 21:42:51 DEBUG Recycler: -Dio.netty.recycler.maxCapacity.default: 262144 +--------+-------------------------------------------------+----------------+ |00000020| 38 61 31 31 37 37 64 34 34 32 b3 01 01 |8a1177d442... | |00000010| 64 64 36 31 2d 34 32 32 34 2d 62 39 30 32 2d 39 |dd61-4224-b902-9| |00000000| 00 00 00 29 14 01 01 65 65 66 62 36 35 36 64 2d |...)...eefb656d-| +--------+-------------------------------------------------+----------------+ | 0 1 2 3 4 5 6 7 8 9 a b c d e f | +-------------------------------------------------+ 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] RECEIVED: 45B 20/10/30 21:42:51 DEBUG ResourceLeakDetector: -Dio.netty.leakDetection.maxRecords: 4 20/10/30 21:42:51 DEBUG ResourceLeakDetector: -Dio.netty.leakDetection.level: simple 20/10/30 21:42:51 DEBUG AbstractByteBuf: -Dio.netty.buffer.bytebuf.checkAccessible: true 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] ACTIVE 20/10/30 21:42:51 DEBUG Rpc: [id: 0x5e9c0940, L:/100.106.192.139:11000 - R:/10.139.22.111:57906] REGISTERED 20/10/30 21:42:50 DEBUG JavassistTypeParameterMatcherGenerator: Generated: io.netty.util.internal.__matchers__.org.apache.livy.rsc.rpc.Rpc$SaslMessageMatcher 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 4 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-b415b264" host: "N/A" rpc_port: -1 yarn_application_state: ACCEPTED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/" diagnostics: "AM container is launched, waiting for AM container to Register with RM" startTime: 1604094166407 finishTime: 0 final_application_status: APP_UNDEFINED app_resource_Usage { num_used_containers: 1 num_reserved_containers: 0 used_resources { memory: 4512 virtual_cores: 1 } reserved_resources { memory: 0 virtual_cores: 0 } needed_resources { memory: 4512 virtual_cores: 1 } memory_seconds: 0 vcore_seconds: 0 8: 0x4192e000 9: 0x4192e000 10: 0 11: 0 } originalTrackingUrl: "N/A" currentApplicationAttemptId { application_id { id: 4 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 0.0 applicationType: "SPARK" applicationTags: "livy-session-0-xt4zdsdd" 21: 2 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 
final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }} 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: Call: getApplications took 2ms 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"} 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG Client: stopping client from cache: org.apache.hadoop.ipc.Client@41925502 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO ShutdownHookManager: Deleting directory /tmp/spark-b019521e-7c6b-4a3b-a38e-00672ed3c401 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO ShutdownHookManager: Deleting directory /tmp/spark-4ae3b2f7-0d2a-4dbb-9e56-7ab23e9ca6ca 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO ShutdownHookManager: Shutdown hook called 20/10/30 21:42:46 INFO LineBufferedStream: user: hadoop 20/10/30 
21:42:46 INFO LineBufferedStream: tracking URL: http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0004/ 20/10/30 21:42:46 INFO LineBufferedStream: final status: UNDEFINED 20/10/30 21:42:46 INFO LineBufferedStream: start time: 1604094166407 20/10/30 21:42:46 INFO LineBufferedStream: queue: default 20/10/30 21:42:46 INFO LineBufferedStream: ApplicationMaster RPC port: -1 20/10/30 21:42:46 INFO LineBufferedStream: ApplicationMaster host: N/A 20/10/30 21:42:46 INFO LineBufferedStream: diagnostics: [Fri Oct 30 21:42:46 +0000 2020] Application is Activated, waiting for resources to be assigned for AM. Details : AM Partition = CORE ; Partition Resource = ; Queue's Absolute capacity = 100.0 % ; Queue's Absolute used capacity = 0.0 % ; Queue's Absolute max capacity = 100.0 % ; 20/10/30 21:42:46 INFO LineBufferedStream: client token: N/A 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO Client: 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO Client: Application report for application_1604078122294_0004 (state: ACCEPTED) 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 2ms 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO YarnClientImpl: Submitted application application_1604078122294_0004 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: Call: getApplicationReport took 4ms 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG ProtobufRpcEngine: Call: submitApplication took 86ms 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG Client: IPC Client (2029780820) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 from hadoop: starting, having connections 2 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG Client: Connecting to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG Client: The ping interval is 60000 ms. 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 INFO Client: Submitting application application_1604078122294_0004 to ResourceManager 20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG HadoopDelegationTokenManager: Service hbase does not require a token. Check your configuration to see if security is disabled or not. 
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:144)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
20/10/30 21:42:46 INFO LineBufferedStream: at javax.security.auth.Subject.doAs(Subject.java:422)
20/10/30 21:42:46 INFO LineBufferedStream: at java.security.AccessController.doPrivileged(Native Method)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit$$anon$3.run(SparkSubmit.scala:144)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit$$anon$3.run(SparkSubmit.scala:146)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.YarnClusterApplication.start(Client.scala:1527)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.Client.run(Client.scala:1135)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:179)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1014)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.Client.setupSecurityToken(Client.scala:310)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager.obtainDelegationTokens(YARNHadoopDelegationTokenManager.scala:59)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.security.HadoopDelegationTokenManager.obtainDelegationTokens(HadoopDelegationTokenManager.scala:130)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.Iterator$class.foreach(Iterator.scala:891)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
20/10/30 21:42:46 INFO LineBufferedStream: at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(HadoopDelegationTokenManager.scala:130)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(HadoopDelegationTokenManager.scala:131)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.security.HBaseDelegationTokenProvider.delegationTokensRequired(HBaseDelegationTokenProvider.scala:62)
20/10/30 21:42:46 INFO LineBufferedStream: at org.apache.spark.deploy.security.HBaseDelegationTokenProvider.hbaseConf(HBaseDelegationTokenProvider.scala:69)
20/10/30 21:42:46 INFO LineBufferedStream: at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
20/10/30 21:42:46 INFO LineBufferedStream: at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
20/10/30 21:42:46 INFO LineBufferedStream: at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
20/10/30 21:42:46 INFO LineBufferedStream: java.lang.ClassNotFoundException: org.apache.hadoop.hbase.HBaseConfiguration
20/10/30 21:42:46 INFO LineBufferedStream: 20/10/30 21:42:46 DEBUG HBaseDelegationTokenProvider: Fail to invoke HBaseConfiguration
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG HadoopDelegationTokenManager: Service hive does not require a token. Check your configuration to see if security is disabled or not.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG HadoopDelegationTokenManager: Service hadoopfs does not require a token. Check your configuration to see if security is disabled or not.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG HadoopDelegationTokenManager: Using the following builtin delegation token providers: hadoopfs, hive, hbase.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO SecurityManager: SecurityManager: authentication enabled; ui acls disabled; users with view permissions: Set(appuser, hadoop); groups with view permissions: Set(); users with modify permissions: Set(appuser, hadoop); groups with modify permissions: Set()
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO SecurityManager: Changing modify acls groups to:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO SecurityManager: Changing view acls groups to:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO SecurityManager: Changing modify acls to: appuser,hadoop
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO SecurityManager: Changing view acls to: appuser,hadoop
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: ===============================================================================
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: LD_LIBRARY_PATH=\"/usr/lib/hadoop/lib/native:/usr/lib/hadoop-lzo/lib/native:$LD_LIBRARY_PATH\" {{JAVA_HOME}}/bin/java -server -Xmx4096m -Djava.io.tmpdir={{PWD}}/tmp '-XX:+UseConcMarkSweepGC' '-XX:CMSInitiatingOccupancyFraction=70' '-XX:MaxHeapFreeRatio=70' '-XX:+CMSClassUnloadingEnabled' '-XX:OnOutOfMemoryError=kill -9 %p' -Dspark.yarn.app.container.log.dir=<LOG_DIR> org.apache.spark.deploy.yarn.ApplicationMaster --class 'org.apache.livy.rsc.driver.RSCDriverBootstrapper' --properties-file {{PWD}}/__spark_conf__/__spark_conf__.properties 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: command:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: datanucleus-rdbms-3.2.9.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-rdbms-3.2.9.jar" } size: 1809447 timestamp: 1604094162533 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: livy-core_2.11-0.7.0-incubating.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-core_2.11-0.7.0-incubating.jar" } size: 95728 timestamp: 1604094162158 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: commons-codec-1.9.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/commons-codec-1.9.jar" } size: 263965 timestamp: 1604094162125 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: livy-api-0.7.0-incubating.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-api-0.7.0-incubating.jar" } size: 14161 timestamp: 1604094161822 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: livy-thriftserver-session-0.7.0-incubating.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-thriftserver-session-0.7.0-incubating.jar" } size: 42040 timestamp: 1604094161946 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: netty-all-4.0.37.Final.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/netty-all-4.0.37.Final.jar" } size: 2204062 timestamp: 1604094162044 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: __spark_conf__ -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/__spark_conf__.zip" } size: 291457 timestamp: 1604094163136 type: ARCHIVE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: hive-site.xml -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/hive-site.xml" } size: 2132 timestamp: 1604094162567 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: sparkr -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/sparkr.zip" } size: 1623235 timestamp: 1604094162637 type: ARCHIVE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: livy-rsc-0.7.0-incubating.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-rsc-0.7.0-incubating.jar" } size: 499087 timestamp: 1604094161911 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: livy-repl_2.11-0.7.0-incubating.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-repl_2.11-0.7.0-incubating.jar" } size: 1010321 timestamp: 1604094162213 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: datanucleus-api-jdo-3.2.6.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-api-jdo-3.2.6.jar" } size: 339666 timestamp: 1604094162328 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: __spark_libs__ -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/__spark_libs__3907077530080990255.zip" } size: 238661437 timestamp: 1604094161514 type: ARCHIVE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: py4j-0.10.7-src.zip -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/py4j-0.10.7-src.zip" } size: 42437 timestamp: 1604094162722 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: datanucleus-core-3.2.10.jar -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-core-3.2.10.jar" } size: 1890075 timestamp: 1604094162442 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: pyspark.zip -> resource { scheme: "hdfs" host: "ip-10-85-100-167.vpc.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1604078122294_0004/pyspark.zip" } size: 591770 timestamp: 1604094162673 type: FILE visibility: PRIVATE
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: resources:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: PYSPARK_PYTHON -> /usr/bin/python3
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: SPARK_PUBLIC_DNS -> $(hostname -f)
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: PYTHONPATH -> {{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.10.7-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.10.7-src.zip
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: PYTHONHASHSEED -> 0
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: SPARK_USER -> hadoop
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: SPARK_YARN_STAGING_DIR -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: CLASSPATH -> /usr/lib/hadoop-lzo/lib/*:/usr/lib/hadoop/hadoop-aws.jar:/usr/share/aws/aws-java-sdk/*:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/*:/usr/share/aws/emr/emrfs/auxlib/*:/usr/share/aws/emr/goodies/lib/emr-spark-goodies.jar:/usr/share/aws/emr/security/conf:/usr/share/aws/emr/security/lib/*:/usr/share/aws/hmclient/lib/aws-glue-datacatalog-spark-client.jar:/usr/share/java/Hive-JSON-Serde/hive-openx-serde.jar:/usr/share/aws/sagemaker-spark-sdk/lib/sagemaker-spark-sdk.jar:/usr/share/aws/emr/s3select/lib/emr-s3-select-spark-connector.jar<CPS>{{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>{{PWD}}/__spark_libs__/*<CPS>$HADOOP_CONF_DIR<CPS>$HADOOP_COMMON_HOME/*<CPS>$HADOOP_COMMON_HOME/lib/*<CPS>$HADOOP_HDFS_HOME/*<CPS>$HADOOP_HDFS_HOME/lib/*<CPS>$HADOOP_MAPRED_HOME/*<CPS>$HADOOP_MAPRED_HOME/lib/*<CPS>$HADOOP_YARN_HOME/*<CPS>$HADOOP_YARN_HOME/lib/*<CPS>/usr/lib/hadoop-lzo/lib/*<CPS>/usr/share/aws/emr/emrfs/conf<CPS>/usr/share/aws/emr/emrfs/lib/*<CPS>/usr/share/aws/emr/emrfs/auxlib/*<CPS>/usr/share/aws/emr/lib/*<CPS>/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar<CPS>/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar<CPS>/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-api-jdo.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-core.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-rdbms.jar<CPS>/usr/share/aws/emr/cloudwatch-sink/lib/*<CPS>/usr/share/aws/aws-java-sdk/*<CPS>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*<CPS>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*<CPS>/usr/lib/hadoop-lzo/lib/*<CPS>/usr/share/aws/emr/emrfs/conf<CPS>/usr/share/aws/emr/emrfs/lib/*<CPS>/usr/share/aws/emr/emrfs/auxlib/*<CPS>/usr/share/aws/emr/lib/*<CPS>/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar<CPS>/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar<CPS>/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar<CPS>/usr/share/aws/emr/cloudwatch-sink/lib/*<CPS>/usr/share/aws/aws-java-sdk/*<CPS>{{PWD}}/__spark_conf__/__hadoop_conf__
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: env:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: user class: org.apache.livy.rsc.driver.RSCDriverBootstrapper
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: YARN AM launch context:
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG Client: ===============================================================================
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: setReplication took 3ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: create took 6ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:43 INFO LineBufferedStream: 20/10/30 21:42:43 INFO Client: Uploading resource file:/tmp/spark-4ae3b2f7-0d2a-4dbb-9e56-7ab23e9ca6ca/__spark_conf__5713781115016164935.zip -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/__spark_conf__.zip
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 WARN Client: Same name resource file:///opt/spark/python/lib/py4j-0.10.7-src.zip added multiple times to distributed cache
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 WARN Client: Same name resource file:///opt/spark/python/lib/pyspark.zip added multiple times to distributed cache
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/python/lib/py4j-0.10.7-src.zip -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/py4j-0.10.7-src.zip
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/python/lib/pyspark.zip -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/pyspark.zip
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 4ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/R/lib/sparkr.zip#sparkr -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/sparkr.zip
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/livy/conf/hive-site.xml -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/hive-site.xml
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 45ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/jars/datanucleus-rdbms-3.2.9.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-rdbms-3.2.9.jar
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK] 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK] 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 4ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 4ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/jars/datanucleus-core-3.2.10.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-core-3.2.10.jar 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/jars/datanucleus-core-3.2.10.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-core-3.2.10.jar 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 8ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 8ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec 
org.apache.hadoop.crypto.JceAesCtrCryptoCodec. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK] 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK] 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 3ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/spark/jars/datanucleus-api-jdo-3.2.6.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/datanucleus-api-jdo-3.2.6.jar 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 4ms 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec. 20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available. 
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/livy/repl_2.11-jars/livy-repl_2.11-0.7.0-incubating.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-repl_2.11-0.7.0-incubating.jar
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/livy/repl_2.11-jars/livy-core_2.11-0.7.0-incubating.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-core_2.11-0.7.0-incubating.jar
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 INFO Client: Uploading resource file:/opt/livy/repl_2.11-jars/commons-codec-1.9.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/commons-codec-1.9.jar
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 6ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: setReplication took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: complete took 7ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: create took 3ms
20/10/30 21:42:42 INFO LineBufferedStream: 20/10/30 21:42:42 DEBUG ProtobufRpcEngine: Call: getFileInfo took 47ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 INFO Client: Uploading resource file:/opt/livy/rsc-jars/netty-all-4.0.37.Final.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/netty-all-4.0.37.Final.jar
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.91, datanodeId = DatanodeInfoWithStorage[10.85.100.91:50010,DS-8ff5d078-5d3e-4538-b5c4-593644ce9111,DISK]
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: addBlock took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: create took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 INFO Client: Uploading resource file:/opt/livy/rsc-jars/livy-thriftserver-session-0.7.0-incubating.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-thriftserver-session-0.7.0-incubating.jar
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setPermission took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: complete took 5ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: addBlock took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: create took 3ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 INFO Client: Uploading resource file:/opt/livy/rsc-jars/livy-rsc-0.7.0-incubating.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-rsc-0.7.0-incubating.jar
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setPermission took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: complete took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: addBlock took 79ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: create took 4ms
20/10/30 21:42:41 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getApplications took 3ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"}
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 INFO Client: Uploading resource file:/opt/livy/rsc-jars/livy-api-0.7.0-incubating.jar -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/livy-api-0.7.0-incubating.jar
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 1ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 9ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 3ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: getFileInfo took 66ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG Client: getting client out of cache: org.apache.hadoop.ipc.Client@41925502
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG RetryUtils: multipleLinearRandomRetry = null
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG BlockReaderLocal: dfs.domain.socket.path =
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG BlockReaderLocal: dfs.client.domain.socket.data.traffic = false
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG BlockReaderLocal: dfs.client.read.shortcircuit = false
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG UserGroupInformation: PrivilegedAction as:hadoop (auth:PROXY) via hadoop (auth:SIMPLE) from:org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:331)
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setPermission took 3ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: setReplication took 2ms
20/10/30 21:42:41 INFO LineBufferedStream: 20/10/30 21:42:41 DEBUG ProtobufRpcEngine: Call: complete took 3ms
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG ProtobufRpcEngine: Call: addBlock took 3ms
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG Client: IPC Client (2029780820) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 from hadoop: stopped, remaining connections 1
20/10/30 21:42:37 INFO LineBufferedStream: 20/10/30 21:42:37 DEBUG Client: IPC Client (2029780820) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 from hadoop: closed
20/10/30 21:42:36 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:36 DEBUG ProtobufRpcEngine: Call: getApplications took 2ms
20/10/30 21:42:36 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"}
20/10/30 21:42:31 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:31 DEBUG ProtobufRpcEngine: Call: getApplications took 73ms
20/10/30 21:42:31 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"}
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG PerformanceAdvisory: Using crypto codec org.apache.hadoop.crypto.JceAesCtrCryptoCodec.
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG PerformanceAdvisory: Crypto codec org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec is not available.
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:448)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1262)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.createBlockOutputStream(DFSOutputStream.java:1314)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:183)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:211)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:242)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getEncryptedStreams(SaslDataTransferClient.java:299)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:490)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.createStreamPair(DataTransferSaslUtil.java:339)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.crypto.CryptoCodec.getInstance(CryptoCodec.java:67)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:132)
20/10/30 21:42:31 INFO LineBufferedStream: at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
20/10/30 21:42:31 INFO LineBufferedStream: at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
20/10/30 21:42:31 INFO LineBufferedStream: at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
20/10/30 21:42:31 INFO LineBufferedStream: at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec.<init>(OpensslAesCtrCryptoCodec.java:50)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.crypto.OpensslCipher.<clinit>(OpensslCipher.java:84)
20/10/30 21:42:31 INFO LineBufferedStream: at org.apache.hadoop.util.NativeCodeLoader.buildSupportsOpenssl(Native Method)
20/10/30 21:42:31 INFO LineBufferedStream: java.lang.UnsatisfiedLinkError: org.apache.hadoop.util.NativeCodeLoader.buildSupportsOpenssl()Z
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG OpensslCipher: Failed to load OpenSSL Cipher.
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG DataTransferSaslUtil: Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG DataTransferSaslUtil: Verifying QOP, requested QOP = [auth-conf], negotiated QOP = auth-conf
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG SaslDataTransferClient: Client using encryption algorithm null
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG SaslDataTransferClient: SASL client doing encrypted handshake for addr = /10.85.100.82, datanodeId = DatanodeInfoWithStorage[10.85.100.82:50010,DS-feac361b-5d6a-402b-9b9c-cabfb6196602,DISK]
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG ProtobufRpcEngine: Call: getDataEncryptionKey took 3ms
20/10/30 21:42:31 INFO LineBufferedStream: 20/10/30 21:42:31 DEBUG ProtobufRpcEngine: Call: getServerDefaults took 87ms
20/10/30 21:42:30 INFO LineBufferedStream: 20/10/30 21:42:30 DEBUG ProtobufRpcEngine: Call: addBlock took 5ms
20/10/30 21:42:30 INFO LineBufferedStream: 20/10/30 21:42:30 DEBUG ProtobufRpcEngine: Call: create took 4ms
20/10/30 21:42:30 INFO LineBufferedStream: 20/10/30 21:42:30 DEBUG ProtobufRpcEngine: Call: getFileInfo took 3ms
20/10/30 21:42:30 INFO LineBufferedStream: 20/10/30 21:42:30 INFO Client: Uploading resource file:/tmp/spark-4ae3b2f7-0d2a-4dbb-9e56-7ab23e9ca6ca/__spark_libs__3907077530080990255.zip -> hdfs://ip-10-85-100-167.vpc.internal:8020/user/hadoop/.sparkStaging/application_1604078122294_0004/__spark_libs__3907077530080990255.zip
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 WARN Client: Neither spark.yarn.jars nor spark.yarn.archive is set, falling back to uploading libraries under SPARK_HOME.
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG ProtobufRpcEngine: Call: setPermission took 3ms
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG ProtobufRpcEngine: Call: mkdirs took 7ms
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: IPC Client (2029780820) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8020 from hadoop: starting, having connections 2
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: Connecting to ip-10-85-100-167.vpc.internal/10.85.100.167:8020
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: The ping interval is 60000 ms.
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Preparing resources for our AM container
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Setting up the launch environment for our AM container
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Setting up container launch context for our AM
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Will allocate AM container, with 4505 MB memory including 409 MB overhead
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Verifying our application has not requested more than the maximum memory capability of the cluster (12288 MB per container)
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG ProtobufRpcEngine: Call: getNewApplication took 3ms
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO Client: Requesting a new application from cluster with 2 NodeManagers
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG ProtobufRpcEngine: Call: getClusterMetrics took 184ms
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: IPC Client (2029780820) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 from hadoop: starting, having connections 1
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: Connecting to ip-10-85-100-167.vpc.internal/10.85.100.167:8032
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: The ping interval is 60000 ms.
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl is started
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG Client: getting client out of cache: org.apache.hadoop.ipc.Client@41925502
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG HadoopYarnProtoRPC: Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ApplicationClientProtocol
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG YarnRPC: Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG UserGroupInformation: PrivilegedAction as:hadoop (auth:PROXY) via hadoop (auth:SIMPLE) from:org.apache.hadoop.yarn.client.RMProxy.getProxy(RMProxy.java:136)
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 INFO RMProxy: Connecting to ResourceManager at ip-10-85-100-167.vpc.internal/10.85.100.167:8032
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state INITED
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG DataTransferSaslUtil: DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection
20/10/30 21:42:27 INFO LineBufferedStream: 20/10/30 21:42:27 DEBUG PerformanceAdvisory: Both short-circuit local reads and UNIX domain socket are disabled.
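The "Will allocate AM container, with 4505 MB memory including 409 MB overhead" line above is the requested driver memory plus Spark's default YARN memory overhead. A minimal sketch of that arithmetic, assuming Spark's documented defaults of a 10% overhead factor with a 384 MB floor (the 4G figure comes from the session's driverMemory):

    # Hedged sketch of the overhead arithmetic implied by the Client log line;
    # the 0.10 factor and 384 MB floor are Spark's default YARN overhead settings.
    driver_memory_mb = 4096                               # driverMemory: 4G
    overhead_mb = max(int(driver_memory_mb * 0.10), 384)  # -> 409
    total_mb = driver_memory_mb + overhead_mb             # -> 4505
    print(f"{total_mb} MB memory including {overhead_mb} MB overhead")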
20/10/30 21:42:26 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:26 DEBUG ProtobufRpcEngine: Call: getApplications took 3ms
20/10/30 21:42:26 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"}
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG Client: getting client out of cache: org.apache.hadoop.ipc.Client@41925502
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG Server: rpcKind=RPC_PROTOCOL_BUFFER, rpcRequestWrapperClass=class org.apache.hadoop.ipc.ProtobufRpcEngine$RpcRequestWrapper, rpcInvoker=org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker@192d74fb
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG RetryUtils: multipleLinearRandomRetry = null
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG BlockReaderLocal: dfs.domain.socket.path =
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG BlockReaderLocal: dfs.client.domain.socket.data.traffic = false
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG BlockReaderLocal: dfs.client.read.shortcircuit = false
20/10/30 21:42:26 INFO LineBufferedStream: 20/10/30 21:42:26 DEBUG BlockReaderLocal: dfs.client.use.legacy.blockreader.local = false
20/10/30 21:42:24 INFO LineBufferedStream: 20/10/30 21:42:24 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
20/10/30 21:42:22 DEBUG MimeDetectorRegistry: Registering MimeDetector with name [eu.medsea.mimeutil.detector.ExtensionMimeDetector] and description [Get the mime types of file extensions]
20/10/30 21:42:22 DEBUG MimeDetectorRegistry: Registering MimeDetector with name [eu.medsea.mimeutil.detector.MagicMimeMimeDetector] and description [Get the mime types of files or streams using the Unix file(5) magic.mime files]
20/10/30 21:42:22 DEBUG MagicMimeMimeDetector: Parsing "resource:eu/medsea/mimeutil/magic.mime" took 285 msec.
20/10/30 21:42:21 DEBUG ProtobufRpcEngine: 61: Response <- ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {applications { applicationId { id: 2 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-add7437d" host: "ip-10-85-100-82.vpc.internal" rpc_port: 35481 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0002/" diagnostics: "" startTime: 1604091553637 finishTime: 1604092159313 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2735224 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0002/1" currentApplicationAttemptId { application_id { id: 2 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-bhpwizot" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 3 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-ef0b565a" host: "ip-10-85-100-82.vpc.internal" rpc_port: 42329 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0003/" diagnostics: "" startTime: 1604093006235 finishTime: 1604093611385 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2731952 vcore_seconds: 605 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0003/1" currentApplicationAttemptId { application_id { id: 3 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-uyoshmxk" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" } applications { applicationId { id: 1 cluster_timestamp: 1604078122294 } user: "hadoop" queue: "default" name: "Demo-f8ff404c" host: "ip-10-85-100-91.vpc.internal" rpc_port: 32985 yarn_application_state: FINISHED trackingUrl: "http://ip-10-85-100-167.vpc.internal:20888/proxy/application_1604078122294_0001/" diagnostics: "" startTime: 1604079442983 finishTime: 1604080049491 final_application_status: APP_SUCCEEDED app_resource_Usage { num_used_containers: -1 num_reserved_containers: -1 used_resources { memory: -1 virtual_cores: -1 } reserved_resources { memory: -1 virtual_cores: -1 } needed_resources { memory: -1 virtual_cores: -1 } memory_seconds: 2738016 vcore_seconds: 606 10: 0 11: 0 } originalTrackingUrl: "ip-10-85-100-167.vpc.internal:18080/history/application_1604078122294_0001/1" currentApplicationAttemptId { application_id { id: 1 cluster_timestamp: 1604078122294 } attemptId: 1 } progress: 1.0 applicationType: "SPARK" applicationTags: "livy-session-0-2hxcdpks" 21: 4 22: 0 23: "\b\000" 24: "" 25: "CORE" }}
20/10/30 21:42:21 DEBUG ProtobufRpcEngine: Call: getApplications took 816ms
20/10/30 21:42:21 DEBUG Client: IPC Client (993094877) connection to ip-10-85-100-167.vpc.internal/10.85.100.167:8032 from hadoop: starting, having connections 1
20/10/30 21:42:21 DEBUG Client: Connecting to ip-10-85-100-167.vpc.internal/10.85.100.167:8032
20/10/30 21:42:21 DEBUG Client: The ping interval is 60000 ms.
20/10/30 21:42:21 DEBUG ProtobufRpcEngine: 61: Call -> ip-10-85-100-167.vpc.internal/10.85.100.167:8032: getApplications {application_types: "SPARK"}
20/10/30 21:42:20 INFO InteractiveSessionManager: Registered new session 0
20/10/30 21:42:20 DEBUG RSCClient: Sending JobRequest[20a80b05-0b63-42be-8678-bf8dbed969e9].
20/10/30 21:42:20 INFO InteractiveSessionManager: Registering new session 0
20/10/30 21:42:20 INFO RpcServer: Connected to the port 11000
20/10/30 21:42:20 DEBUG NetUtil: /proc/sys/net/core/somaxconn: 128
20/10/30 21:42:20 DEBUG NetUtil: Loopback interface: lo (lo, 127.0.0.1)
20/10/30 21:42:20 DEBUG ByteBufUtil: -Dio.netty.maxThreadLocalCharBufferSize: 16384
20/10/30 21:42:20 DEBUG ByteBufUtil: -Dio.netty.threadLocalDirectBufferSize: 65536
20/10/30 21:42:20 DEBUG ByteBufUtil: -Dio.netty.allocator.type: unpooled
20/10/30 21:42:19 DEBUG ThreadLocalRandom: -Dio.netty.initialSeedUniquifier: 0xd31e05471c8b0014 (took 0 ms)
20/10/30 21:42:19 DEBUG NioEventLoop: -Dio.netty.selectorAutoRebuildThreshold: 512
20/10/30 21:42:19 DEBUG NioEventLoop: -Dio.netty.noKeySetOptimization: false
20/10/30 21:42:19 DEBUG PlatformDependent: io.netty.maxDirectMemory: 778502144 bytes
20/10/30 21:42:19 DEBUG PlatformDependent: -Dio.netty.noPreferDirect: false
20/10/30 21:42:19 DEBUG PlatformDependent: -Dio.netty.bitMode: 64 (sun.arch.data.model)
20/10/30 21:42:19 DEBUG PlatformDependent: -Dio.netty.tmpdir: /tmp (java.io.tmpdir)
20/10/30 21:42:19 DEBUG PlatformDependent: Javassist: available
20/10/30 21:42:19 DEBUG PlatformDependent: -Dio.netty.noJavassist: false
20/10/30 21:42:19 DEBUG PlatformDependent: sun.misc.Unsafe: available
20/10/30 21:42:19 DEBUG PlatformDependent: -Dio.netty.noUnsafe: false
20/10/30 21:42:19 DEBUG PlatformDependent: Java version: 8
20/10/30 21:42:19 DEBUG PlatformDependent0: java.nio.DirectByteBuffer.<init>(long, int): available
20/10/30 21:42:19 DEBUG PlatformDependent0: java.nio.Bits.unaligned: true
20/10/30 21:42:19 DEBUG PlatformDependent0: sun.misc.Unsafe.copyMemory: available
20/10/30 21:42:19 DEBUG PlatformDependent0: sun.misc.Unsafe.theUnsafe: available
20/10/30 21:42:19 DEBUG PlatformDependent0: java.nio.Buffer.address: available
20/10/30 21:42:19 DEBUG MultithreadEventLoopGroup: -Dio.netty.eventLoopThreads: 2
20/10/30 21:42:19 DEBUG InternalLoggerFactory: Using SLF4J as the default logging framework
20/10/30 21:42:19 INFO InteractiveSession$: Creating Interactive session 0: [owner: null, request: [kind: spark, proxyUser: Some(hadoop), driverMemory: 4G, name: Demo-b415b264, conf: livy.rsc.rpc.server.address -> a.example.com, heartbeatTimeoutInSecond: 0]] 
20/10/30 21:42:19 DEBUG InteractiveSession$: Enable HiveContext because hive-site.xml is found under classpath, /opt/livy/conf/hive-site.xml
20/10/30 21:42:19 DEBUG AccessManager: user=null aclsOn=false superAcls=
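
The InteractiveSession$ and InteractiveSessionManager entries above record the Livy server creating interactive session 0 (kind: spark, proxyUser hadoop, driverMemory 4G, heartbeatTimeoutInSecond 0). A client triggers this path by POSTing to Livy's /sessions REST endpoint; the sketch below is a minimal illustration only, assuming a Livy server reachable at livy-host:8998 (the hostname is a placeholder, not taken from this log).

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class CreateLivySession {
    public static void main(String[] args) throws Exception {
        // Hypothetical Livy endpoint; substitute the real server host.
        URL url = new URL("http://livy-host:8998/sessions");

        // Mirrors the request fields visible in the log entry above:
        // kind=spark, proxyUser=hadoop, driverMemory=4G,
        // heartbeatTimeoutInSecond=0.
        String body = "{\"kind\":\"spark\",\"proxyUser\":\"hadoop\","
                + "\"driverMemory\":\"4G\",\"heartbeatTimeoutInSecond\":0}";

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setDoOutput(true);
        try (OutputStream os = conn.getOutputStream()) {
            os.write(body.getBytes(StandardCharsets.UTF_8));
        }

        // Livy answers 201 Created with a JSON session description; the
        // session then moves from "starting" to "idle" once the driver
        // (the YARN application polled above) is up.
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}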
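The recurring getApplications {application_types: "SPARK"} call/response pairs at the top of this excerpt are Livy polling the YARN ResourceManager on port 8032 through Hadoop's YarnClient API. The following is a minimal standalone sketch of the same RPC, assuming a yarn-site.xml with the ResourceManager address is on the classpath; the class name and output format are illustrative, not part of Livy.

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class ListSparkApplications {
    public static void main(String[] args) throws Exception {
        // Reads the ResourceManager address (e.g. the :8032 endpoint in
        // the log) from yarn-site.xml on the classpath.
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();
        try {
            // Issues the same getApplications RPC seen in the log,
            // filtered to applicationType "SPARK".
            List<ApplicationReport> reports =
                    yarnClient.getApplications(Collections.singleton("SPARK"));
            for (ApplicationReport report : reports) {
                // Each report carries the fields dumped in the protobuf
                // responses above (id, name, state, progress, ...).
                System.out.printf("%s %s %s %.0f%%%n",
                        report.getApplicationId(),
                        report.getName(),
                        report.getYarnApplicationState(),
                        report.getProgress() * 100);
            }
        } finally {
            yarnClient.stop();
        }
    }
}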