Details
- Type: Improvement
- Status: Resolved
- Priority: Major
- Resolution: Fixed
- Fix Version: 3.3.0
- Affects Version: None
Description
Repro code
./bin/spark-shell --master local --jars /Users/jars/iceberg-spark3-runtime-0.12.1.jar
// Repro script: verify that a DSv2 (Iceberg) write reports task-level
// output metrics (bytesWritten / recordsWritten) via SparkListener.
// Run inside spark-shell; `spark` is the ambient SparkSession.
import scala.collection.mutable
import org.apache.spark.scheduler._

// Collect the output metrics reported by each finished task.
val bytesWritten = new mutable.ArrayBuffer[Long]()
val recordsWritten = new mutable.ArrayBuffer[Long]()

val bytesWrittenListener = new SparkListener() {
  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
    bytesWritten += taskEnd.taskMetrics.outputMetrics.bytesWritten
    recordsWritten += taskEnd.taskMetrics.outputMetrics.recordsWritten
  }
}

spark.sparkContext.addSparkListener(bytesWrittenListener)
try {
  val df = spark.range(1000).toDF("id")
  df.write.format("iceberg").save("Users/data/dsv2_test")
  // The issue: for DSv2 writes these sums are 0 even though data was written.
  assert(bytesWritten.sum > 0)
  assert(recordsWritten.sum > 0)
} finally {
  // Always detach the listener, even if the asserts fail.
  spark.sparkContext.removeSparkListener(bytesWrittenListener)
}