PARQUET-1336: PrimitiveComparator should implement Serializable



      Description

      [info] Cause: java.lang.RuntimeException: java.io.NotSerializableException: org.apache.parquet.schema.PrimitiveComparator$8
      [info] at org.apache.parquet.hadoop.ParquetInputFormat.setFilterPredicate(ParquetInputFormat.java:211)
      [info] at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat$$anonfun$buildReaderWithPartitionValues$1.apply(ParquetFileFormat.scala:399)
      [info] at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat$$anonfun$buildReaderWithPartitionValues$1.apply(ParquetFileFormat.scala:349)
      [info] at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.org$apache$spark$sql$execution$datasources$FileScanRDD$$anon$$readCurrentFile(FileScanRDD.scala:128)
      [info] at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.nextIterator(FileScanRDD.scala:182)
      [info] at org.apache.spark.sql.execution.datasources.FileScanRDD$$anon$1.hasNext(FileScanRDD.scala:109)
      [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
      [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
      [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
      [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
      [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
      [info] at org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1791)
      [info] at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
      [info] at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
      [info] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2071)
      [info] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2071)
      [info] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
      [info] at org.apache.spark.scheduler.Task.run(Task.scala:109)
      [info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:367)
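      The predicate that Spark passes to ParquetInputFormat.setFilterPredicate is Java-serialized into the Hadoop Configuration, so every object reachable from it must implement java.io.Serializable; the anonymous comparator subclass org.apache.parquet.schema.PrimitiveComparator$8 reachable from the serialized predicate does not, which produces the NotSerializableException above. Below is a minimal sketch of the change suggested by the title, not the exact patch, assuming PrimitiveComparator already implements java.util.Comparator and keeps its existing comparison logic unchanged:

      package org.apache.parquet.schema;

      import java.io.Serializable;
      import java.util.Comparator;

      // Declaring the abstract base class Serializable makes its anonymous
      // singleton subclasses (such as PrimitiveComparator$8 in the trace above)
      // serializable as well, since the marker interface is inherited.
      public abstract class PrimitiveComparator<T> implements Comparator<T>, Serializable {
        // existing compare(...) implementations and the static comparator
        // instances stay as they are; Serializable adds no methods to implement
      }

      With the comparator hierarchy serializable, the filter predicate graph can be written into the configuration and shipped to executors without this failure.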
      
      


        People

          Assignee: Yuming Wang (yumwang)
          Reporter: Yuming Wang (yumwang)
          Votes: 0
          Watchers: 2
