Hadoop HDFS / HDFS-14708

TestLargeBlockReport#testBlockReportSucceedsWithLargerLengthLimit fails in trunk


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Minor
    • Resolution: Fixed
    • Fix Version/s: 3.3.0
    • Hadoop Flags: Reviewed

    Description

      [ERROR] testBlockReportSucceedsWithLargerLengthLimit(org.apache.hadoop.hdfs.server.datanode.TestLargeBlockReport)  Time elapsed: 47.956 s  <<< ERROR!
      org.apache.hadoop.ipc.RemoteException(java.io.IOException): java.lang.IllegalStateException: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large.  May be malicious.  Use CodedInputStream.setSizeLimit() to increase the size limit.
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.runBlockOp(BlockManager.java:5011)
      	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.blockReport(NameNodeRpcServer.java:1581)
      	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB.blockReport(DatanodeProtocolServerSideTranslatorPB.java:181)
      	at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.callBlockingMethod(DatanodeProtocolProtos.java:31664)
      	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:529)
      	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070)
      	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1001)
      	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:929)
      	at java.security.AccessController.doPrivileged(Native Method)
      	at javax.security.auth.Subject.doAs(Subject.java:422)
      	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1891)
      	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2921)
      Caused by: java.lang.IllegalStateException: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large.  May be malicious.  Use CodedInputStream.setSizeLimit() to increase the size limit.
      	at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:424)
      	at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:396)
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.reportDiffSorted(BlockManager.java:2952)
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.processReport(BlockManager.java:2787)
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.processReport(BlockManager.java:2655)
      	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.lambda$blockReport$0(NameNodeRpcServer.java:1582)
      	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.processQueue(BlockManager.java:5089)
      	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.run(BlockManager.java:5068)
      Caused by: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large.  May be malicious.  Use CodedInputStream.setSizeLimit() to increase the size limit.
      	at com.google.protobuf.InvalidProtocolBufferException.sizeLimitExceeded(InvalidProtocolBufferException.java:110)
      	at com.google.protobuf.CodedInputStream.refillBuffer(CodedInputStream.java:755)
      	at com.google.protobuf.CodedInputStream.readRawByte(CodedInputStream.java:769)
      	at com.google.protobuf.CodedInputStream.readRawVarint64(CodedInputStream.java:462)
      	at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:420)
      	... 8 more
      
      	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1553)
      	at org.apache.hadoop.ipc.Client.call(Client.java:1499)
      	at org.apache.hadoop.ipc.Client.call(Client.java:1396)
      	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
      	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
      	at com.sun.proxy.$Proxy25.blockReport(Unknown Source)
      	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.blockReport(DatanodeProtocolClientSideTranslatorPB.java:218)
      	at org.apache.hadoop.hdfs.server.datanode.TestLargeBlockReport.testBlockReportSucceedsWithLargerLengthLimit(TestLargeBlockReport.java:97)
      	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
      	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      	at java.lang.reflect.Method.invoke(Method.java:498)
      	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
      	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
      	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
      	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
      	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
      	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
      	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
      	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
      	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
      	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
      	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
      	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
      	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
      	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
      	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
      	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
      	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
      	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
      	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
      	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:384)
      	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:345)
      	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:126)
      	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
      
      

      Ref: https://builds.apache.org/job/PreCommit-HDFS-Build/27416/artifact/out/patch-unit-hadoop-hdfs-project_hadoop-hdfs.txt
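
      For context: the exception comes from protobuf's CodedInputStream, which, in the protobuf 2.x line Hadoop bundles here, refuses to read more than 64 MB from a stream-backed input unless setSizeLimit() is raised first. BlockListAsLongs$BufferDecoder hits that cap while iterating the varint-encoded block list of a very large block report. The snippet below is only an illustrative sketch of that behavior, not the attached patch: the class name and the value count are made up, and it simply shows that a varint run larger than 64 MB decodes cleanly once the limit is lifted.

      import com.google.protobuf.CodedInputStream;
      import com.google.protobuf.CodedOutputStream;

      import java.io.ByteArrayInputStream;
      import java.io.ByteArrayOutputStream;
      import java.io.IOException;

      public class CodedInputSizeLimitDemo {            // illustrative name, not from the patch
        public static void main(String[] args) throws IOException {
          // Encode roughly 80 MB of untagged varint64 values, loosely mimicking the
          // longs-per-block buffer that BlockListAsLongs builds for a block report.
          ByteArrayOutputStream bytes = new ByteArrayOutputStream();
          CodedOutputStream out = CodedOutputStream.newInstance(bytes);
          long count = 20_000_000L;                      // arbitrary count chosen to exceed 64 MB
          for (long i = 0; i < count; i++) {
            out.writeUInt64NoTag(i);
          }
          out.flush();
          System.out.println("Encoded " + bytes.size() + " bytes");

          // With protobuf-java 2.5.0, decoding this from a stream-backed CodedInputStream
          // throws the same "Protocol message was too large" InvalidProtocolBufferException
          // once 64 MB have been consumed, unless the size limit is raised first.
          CodedInputStream in =
              CodedInputStream.newInstance(new ByteArrayInputStream(bytes.toByteArray()));
          in.setSizeLimit(Integer.MAX_VALUE);            // lift the default cap before iterating
          long decoded = 0;
          while (!in.isAtEnd()) {
            in.readRawVarint64();
            decoded++;
          }
          System.out.println("Decoded " + decoded + " varints");
        }
      }

      With protobuf-java 2.5.0 on the classpath, commenting out the setSizeLimit() call reproduces the sizeLimitExceeded failure shown in the stack trace above.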

      Attachments

        1. HDFS-14708-002.patch (2 kB, Lisheng Sun)
        2. HDFS-14708-001.patch (2 kB, Lisheng Sun)


      People

        Assignee: Lisheng Sun (leosun08)
        Reporter: Lisheng Sun (leosun08)
        Votes: 0
        Watchers: 6
