SPARK-26452: Suppressing exception in finally: Java heap space java.lang.OutOfMemoryError: Java heap space


    Details

    • Type: Bug
    • Status: Resolved
    • Priority: Major
    • Resolution: Invalid
    • Affects Version/s: 2.2.0
    • Fix Version/s: None
    • Component/s: Java API
    • Labels: None

      Description

      In a Spark 2.2.0 Structured Streaming program, the spark-submit shell command is as follows:

      spark-submit.sh \
      --driver-memory 3g \
      --num-executors 10 \
      --executor-memory 16g \
      ...
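
      For reference, the same memory and executor settings can also be written as Spark configuration properties. The sketch below is not part of the original report; the app name is an assumption, and only the three values (3g driver memory, 10 executors, 16g executor memory) come from the submit command above. Note that spark.driver.memory only takes effect if it is set before the driver JVM starts (via spark-submit or spark-defaults.conf), so this form mainly documents the intended settings.

      import org.apache.spark.sql.SparkSession

      // Hypothetical equivalent of the spark-submit flags above (Spark 2.x API).
      val spark = SparkSession.builder()
        .appName("structured-streaming-job")      // assumption, not in the report
        .config("spark.driver.memory", "3g")      // --driver-memory 3g
        .config("spark.executor.instances", "10") // --num-executors 10 (YARN)
        .config("spark.executor.memory", "16g")   // --executor-memory 16g
        .getOrCreate()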
      

      After running for about a day the application goes into a "fake death" state: the executors are still in RUNNING status and the driver process is still alive, but no tasks are assigned. The program prints the following error (a diagnostic sketch follows the log):

      [Stage 1852:===================================================>(896 + 3) / 900] 
      [Stage 1852:===================================================>(897 + 3) / 900] 
      [Stage 1852:===================================================>(899 + 1) / 900] 
      [Stage 1853:> (0 + 0) / 900]
      18/12/27 06:03:45 WARN util.Utils: Suppressing exception in finally: Java heap space java.lang.OutOfMemoryError: Java heap space
      at java.nio.HeapByteBuffer.<init>(HeapByteBuffer.java:57)
      at java.nio.ByteBuffer.allocate(ByteBuffer.java:335)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$3.apply(TorrentBroadcast.scala:271)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$3.apply(TorrentBroadcast.scala:271)
      at org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)
      at org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)
      at net.jpountz.lz4.LZ4BlockOutputStream.flushBufferedData(LZ4BlockOutputStream.java:205)
      at net.jpountz.lz4.LZ4BlockOutputStream.write(LZ4BlockOutputStream.java:158)
      at java.io.ObjectOutputStream$BlockDataOutputStream.drain(ObjectOutputStream.java:1877)
      at java.io.ObjectOutputStream$BlockDataOutputStream.flush(ObjectOutputStream.java:1822)
      at java.io.ObjectOutputStream.flush(ObjectOutputStream.java:719)
      at java.io.ObjectOutputStream.close(ObjectOutputStream.java:740)
      at org.apache.spark.serializer.JavaSerializationStream.close(JavaSerializer.scala:57)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$blockifyObject$1.apply$mcV$sp(TorrentBroadcast.scala:278)
      at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1346)
      at org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:277)
      at org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:126)
      at org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:88)
      at org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:34)
      at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:56)
      at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1488)
      at org.apache.spark.scheduler.DAGScheduler.submitMissingTasks(DAGScheduler.scala:1006)
      at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:930)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$submitWaitingChildStages$6.apply(DAGScheduler.scala:776)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$submitWaitingChildStages$6.apply(DAGScheduler.scala:775)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.apache.spark.scheduler.DAGScheduler.submitWaitingChildStages(DAGScheduler.scala:775)
      at org.apache.spark.scheduler.DAGScheduler.handleTaskCompletion(DAGScheduler.scala:1259)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1711)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1669)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1658)
      Exception in thread "dag-scheduler-event-loop" java.lang.OutOfMemoryError: Java heap space
      at java.nio.HeapByteBuffer.<init>(HeapByteBuffer.java:57)
      at java.nio.ByteBuffer.allocate(ByteBuffer.java:335)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$3.apply(TorrentBroadcast.scala:271)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$3.apply(TorrentBroadcast.scala:271)
      at org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)
      at org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)
      at net.jpountz.lz4.LZ4BlockOutputStream.flushBufferedData(LZ4BlockOutputStream.java:205)
      at net.jpountz.lz4.LZ4BlockOutputStream.write(LZ4BlockOutputStream.java:158)
      at java.io.ObjectOutputStream$BlockDataOutputStream.drain(ObjectOutputStream.java:1877)
      at java.io.ObjectOutputStream$BlockDataOutputStream.setBlockDataMode(ObjectOutputStream.java:1786)
      at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1189)
      at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
      at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:43)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$blockifyObject$2.apply(TorrentBroadcast.scala:276)
      at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$blockifyObject$2.apply(TorrentBroadcast.scala:276)
      at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1337)
      at org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:277)
      at org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:126)
      at org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:88)
      at org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:34)
      at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:56)
      at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1488)
      at org.apache.spark.scheduler.DAGScheduler.submitMissingTasks(DAGScheduler.scala:1006)
      at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:930)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$submitWaitingChildStages$6.apply(DAGScheduler.scala:776)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$submitWaitingChildStages$6.apply(DAGScheduler.scala:775)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.apache.spark.scheduler.DAGScheduler.submitWaitingChildStages(DAGScheduler.scala:775)
      at org.apache.spark.scheduler.DAGScheduler.handleTaskCompletion(DAGScheduler.scala:1259)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1711)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1669)
      [Stage 1853:> (0 + 0) / 900]
      [Stage 1853:> (0 + 0) / 900]
      [Stage 1853:> (0 + 0) / 900]
      [Stage 1853:> (0 + 0) / 900]
      ......
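
      The stack traces above show the OutOfMemoryError being thrown on the driver, in the "dag-scheduler-event-loop" thread, while the DAGScheduler broadcasts the task binary for the next stage (SparkContext.broadcast -> TorrentBroadcast.blockifyObject). It is therefore the driver heap (3g in the submit command above), not executor memory, that is exhausted. The Scala sketch below is not part of the original report; it is a minimal diagnostic, assuming it runs inside the same driver program, that prints the memory settings the driver is actually running with.

      import org.apache.spark.sql.SparkSession

      // Diagnostic sketch (not from the report): confirm whether the 3g driver
      // heap from the submit command is the limit being exhausted.
      val spark = SparkSession.builder().getOrCreate()
      val conf = spark.sparkContext.getConf
      println("spark.driver.memory   = " + conf.get("spark.driver.memory", "(default: 1g)"))
      println("spark.executor.memory = " + conf.get("spark.executor.memory", "(default: 1g)"))
      println("Driver JVM max heap   = " + (Runtime.getRuntime.maxMemory() / (1024 * 1024)) + " MB")

      If the reported max heap matches the 3 GB from the submit command, raising --driver-memory (or setting spark.driver.memory in spark-defaults.conf) is the usual first step for a driver-side OutOfMemoryError of this kind; a heap dump taken with the standard JVM option -XX:+HeapDumpOnOutOfMemoryError (passed through spark.driver.extraJavaOptions) would show what is actually occupying the driver heap.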
      

       


    People

    • Assignee: Unassigned
    • Reporter: tommy duan (yy3b2007com)
    • Votes: 0
    • Watchers: 3
