Apache Hudi / HUDI-3751

Hive count throws exception after truncate table


Details

    • Type: Bug
    • Status: Closed
    • Priority: Major
    • Resolution: Fixed
    • Affects Version/s: None
    • Fix Version/s: 0.11.0
    • Component/s: hive
    • Labels: None

    Description

      -- Spark SQL
      -- create table
      create table test_hudi_table (
        id int,
        name string,
        price double,
        ts long,
        year string,
        month string,
        day string
      ) using hudi
      partitioned by (year, month, day)
      options (
        primaryKey = 'id',
        preCombineField = 'ts',
        type = 'cow'
      );

      -- insert
      insert into test_hudi_table values (1, 'hudi', 10.0, 1000, '2022', '03', '31');
      -- truncate
      truncate table test_hudi_table;

      -- then run a count in Hive (on Tez)
      select count(1) from test_hudi_table;

      -- the query then fails with the following exception:
      ERROR : Status: Failed
      ERROR : Vertex failed, vertexName=Map 1, vertexId=vertex_1648681063719_0012_1_00, diagnostics=[Vertex vertex_1648681063719_0012_1_00 [Map 1] killed/failed due to:ROOT_INPUT_INIT_FAILURE, Vertex Input: test_hudi_table initializer failed, vertex=vertex_1648681063719_0012_1_00 [Map 1], java.io.FileNotFoundException: File does not exist: /test_hudi/test_hudi_table/year=2022/month=03/day=31/dd21e7de-430d-4368-8667-07a71e078e3a-0_0-33-1616_20220331095249651.parquet
              at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:86)
              at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
              at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:158)
              at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1931)
              at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:738)
              at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:426)
              at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
              at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
              at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
              at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
              at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
              at java.security.AccessController.doPrivileged(Native Method)
              at javax.security.auth.Subject.doAs(Subject.java:422)
              at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
              at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
              at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
              at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
              at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
              at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
              at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
              at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
              at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:864)
              at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:851)
              at org.apache.hadoop.hdfs.DFSClient.getBlockLocations(DFSClient.java:908)
              at org.apache.hadoop.hdfs.DistributedFileSystem$2.doCall(DistributedFileSystem.java:274)
              at org.apache.hadoop.hdfs.DistributedFileSystem$2.doCall(DistributedFileSystem.java:271)
              at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
              at org.apache.hadoop.hdfs.DistributedFileSystem.getFileBlockLocations(DistributedFileSystem.java:281)
              at org.apache.hadoop.hdfs.DistributedFileSystem.getFileBlockLocations(DistributedFileSystem.java:255)
              at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:361)
              at org.apache.hudi.hadoop.HoodieParquetInputFormatBase.getSplits(HoodieParquetInputFormatBase.java:68)
              at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:524)
              at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:779)
              at org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.initialize(HiveSplitGenerator.java:243)
              at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable$1.run(RootInputInitializerManager.java:278)
              at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable$1.run(RootInputInitializerManager.java:269)
              at java.security.AccessController.doPrivileged(Native Method)
              at javax.security.auth.Subject.doAs(Subject.java:422)
              at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
              at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable.call(RootInputInitializerManager.java:269)
              at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable.call(RootInputInitializerManager.java:253)
              at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:108)
              at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:41)
              at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:77)
              at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
              at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
              at java.lang.Thread.run(Thread.java:748)
      Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /test_hudi/test_hudi_table/year=2022/month=03/day=31/dd21e7de-430d-4368-8667-07a71e078e3a-0_0-33-1616_20220331095249651.parquet
              at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:86)
              at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
              at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:158)
              at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1931)
              at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:738)
              at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:426)
              at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
              at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
              at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
              at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
              at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
              at java.security.AccessController.doPrivileged(Native Method)
              at javax.security.auth.Subject.doAs(Subject.java:422)
              at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
              at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
              at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
              at org.apache.hadoop.ipc.Client.call(Client.java:1443)
              at org.apache.hadoop.ipc.Client.call(Client.java:1353)
              at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
              at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
              at com.sun.proxy.$Proxy12.getBlockLocations(Unknown Source)
              at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:317)
              at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
              at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
              at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
              at java.lang.reflect.Method.invoke(Method.java:498)
              at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
              at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
              at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
              at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
              at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
              at com.sun.proxy.$Proxy13.getBlockLocations(Unknown Source)
              at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:862)
              ... 25 more
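
      The FileNotFoundException above points at a parquet file under
      /test_hudi/test_hudi_table/year=2022/month=03/day=31 that the TRUNCATE removed, while Hive's
      split generation (HoodieParquetInputFormatBase.getSplits -> FileInputFormat.getSplits) still
      lists it. As a rough illustration only, here is a minimal Java sketch using the standard
      Hadoop FileSystem API to inspect that partition directory after the truncate; the HDFS path
      is copied from the trace and the class name is made up for this example:

      import org.apache.hadoop.conf.Configuration;
      import org.apache.hadoop.fs.FileStatus;
      import org.apache.hadoop.fs.FileSystem;
      import org.apache.hadoop.fs.Path;

      // Hypothetical post-truncate check: list the partition directory named in the
      // FileNotFoundException and see whether the referenced parquet file is still there.
      public class CheckTruncatedPartition {
        public static void main(String[] args) throws Exception {
          // Picks up core-site.xml / hdfs-site.xml from the classpath.
          Configuration conf = new Configuration();
          // Partition path copied from the stack trace; adjust for another cluster.
          Path partition = new Path("/test_hudi/test_hudi_table/year=2022/month=03/day=31");
          FileSystem fs = partition.getFileSystem(conf);

          if (!fs.exists(partition)) {
            System.out.println("Partition directory is gone: " + partition);
            return;
          }
          for (FileStatus status : fs.listStatus(partition)) {
            System.out.println(status.getPath() + " (" + status.getLen() + " bytes)");
          }
          // If the parquet file named in the exception no longer shows up here, Hive is
          // planning splits from a stale file listing rather than the current HDFS state.
        }
      }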
       

       

       

       

       

            People

              Assignee: dongkelun (董可伦)
              Reporter: dongkelun (董可伦)
              Votes: 0
              Watchers: 1
