Spark / SPARK-6010

Exception thrown when reading Spark SQL generated Parquet files with different but compatible schemas


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Blocker
    • Resolution: Fixed
    • Affects Version/s: 1.3.0
    • Fix Version/s: 1.3.0
    • Component/s: SQL
    • Labels: None

    Description

      The following test case, added to ParquetPartitionDiscoverySuite, reproduces this issue:

        test("read partitioned table - merging compatible schemas") {
          withTempDir { base =>
            makeParquetFile(
              (1 to 10).map(i => Tuple1(i)).toDF("intField"),
              makePartitionDir(base, defaultPartitionName, "pi" -> 1))
      
            makeParquetFile(
              (1 to 10).map(i => (i, i.toString)).toDF("intField", "stringField"),
              makePartitionDir(base, defaultPartitionName, "pi" -> 2))
      
            load(base.getCanonicalPath, "org.apache.spark.sql.parquet").registerTempTable("t")
      
            withTempTable("t") {
              checkAnswer(
                sql("SELECT * FROM t"),
                (1 to 10).map(i => Row(i, null, 1)) ++ (1 to 10).map(i => Row(i, i.toString, 2)))
            }
          }
        }
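
      For reference, a roughly equivalent standalone reproduction can be sketched against the Spark 1.3 Scala API (the application object name and output path below are illustrative, not part of the original test):

        import org.apache.spark.{SparkConf, SparkContext}
        import org.apache.spark.sql.SQLContext

        // Hypothetical standalone app mirroring the test above without the suite helpers.
        object Spark6010Repro {
          def main(args: Array[String]): Unit = {
            val sc = new SparkContext(new SparkConf().setAppName("SPARK-6010").setMaster("local[2]"))
            val sqlContext = new SQLContext(sc)
            import sqlContext.implicits._

            val base = "/tmp/spark-6010"  // illustrative scratch directory

            // Partition pi=1: schema with only `intField`.
            (1 to 10).map(i => Tuple1(i)).toDF("intField")
              .saveAsParquetFile(s"$base/pi=1")

            // Partition pi=2: schema with the extra, compatible `stringField`.
            (1 to 10).map(i => (i, i.toString)).toDF("intField", "stringField")
              .saveAsParquetFile(s"$base/pi=2")

            // Reading the partitioned table triggers the metadata merge failure shown below.
            sqlContext.load(base, "org.apache.spark.sql.parquet").registerTempTable("t")
            sqlContext.sql("SELECT * FROM t").show()
          }
        }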
      

      Exception thrown:

      [info]   java.lang.RuntimeException: could not merge metadata: key org.apache.spark.sql.parquet.row.metadata has conflicting values: [{"type":"struct","fields":[{"name":"intField","type":"integer","nullable":false,"metadata":{}},{"name":"stringField","type":"string","nullable":true,"metadata":{}}]}, {"type":"struct","fields":[{"name":"intField","type":"integer","nullable":false,"metadata":{}}]}]
      [info]          at parquet.hadoop.api.InitContext.getMergedKeyValueMetaData(InitContext.java:67)
      [info]          at parquet.hadoop.api.ReadSupport.init(ReadSupport.java:84)
      [info]          at org.apache.spark.sql.parquet.FilteringParquetRowInputFormat.getSplits(ParquetTableOperations.scala:484)
      [info]          at parquet.hadoop.ParquetInputFormat.getSplits(ParquetInputFormat.java:245)
      [info]          at org.apache.spark.sql.parquet.ParquetRelation2$$anon$1.getPartitions(newParquet.scala:461)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
      [info]          at scala.Option.getOrElse(Option.scala:120)
      [info]          at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
      [info]          at org.apache.spark.rdd.NewHadoopRDD$NewHadoopMapPartitionsWithSplitRDD.getPartitions(NewHadoopRDD.scala:239)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
      [info]          at scala.Option.getOrElse(Option.scala:120)
      [info]          at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
      [info]          at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
      [info]          at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
      [info]          at scala.Option.getOrElse(Option.scala:120)
      [info]          at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
      [info]          at org.apache.spark.SparkContext.runJob(SparkContext.scala:1518)
      [info]          at org.apache.spark.rdd.RDD.collect(RDD.scala:813)
      [info]          at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:83)
      [info]          at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:790)
      [info]          at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:115)
      [info]          at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:60)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8$$anonfun$apply$mcV$sp$18$$anonfun$apply$8.apply$mcV$sp(ParquetPartitionDiscoverySuite.scala:337)
      [info]          at org.apache.spark.sql.parquet.ParquetTest$class.withTempTable(ParquetTest.scala:112)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite.withTempTable(ParquetPartitionDiscoverySuite.scala:35)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8$$anonfun$apply$mcV$sp$18.apply(ParquetPartitionDiscoverySuite.scala:336)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8$$anonfun$apply$mcV$sp$18.apply(ParquetPartitionDiscoverySuite.scala:325)
      [info]          at org.apache.spark.sql.parquet.ParquetTest$class.withTempDir(ParquetTest.scala:82)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite.withTempDir(ParquetPartitionDiscoverySuite.scala:35)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8.apply$mcV$sp(ParquetPartitionDiscoverySuite.scala:325)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8.apply(ParquetPartitionDiscoverySuite.scala:325)
      [info]          at org.apache.spark.sql.parquet.ParquetPartitionDiscoverySuite$$anonfun$8.apply(ParquetPartitionDiscoverySuite.scala:325)
      [info]          at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      [info]          at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      [info]          at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      [info]          at org.scalatest.Transformer.apply(Transformer.scala:22)
      [info]          at org.scalatest.Transformer.apply(Transformer.scala:20)
      [info]          at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      [info]          at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
      [info]          at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
      [info]          at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      [info]          at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      [info]          at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      [info]          at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      [info]          at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      [info]          at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      [info]          at scala.collection.immutable.List.foreach(List.scala:318)
      [info]          at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      [info]          at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      [info]          at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      [info]          at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      [info]          at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      [info]          at org.scalatest.Suite$class.run(Suite.scala:1424)
      [info]          at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      [info]          at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      [info]          at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      [info]          at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      [info]          at org.scalatest.FunSuite.run(FunSuite.scala:1555)
      [info]          at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
      [info]          at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
      [info]          at sbt.ForkMain$Run$2.call(ForkMain.java:294)
      [info]          at sbt.ForkMain$Run$2.call(ForkMain.java:284)
      [info]          at java.util.concurrent.FutureTask.run(FutureTask.java:266)
      [info]          at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      [info]          at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      [info]          at java.lang.Thread.run(Thread.java:745) (QueryTest.scala:61)
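
      The failure originates in parquet-mr: ReadSupport.init calls InitContext.getMergedKeyValueMetaData, which refuses to merge footers whose user key-value metadata differs for the same key, and the Spark SQL schema stored under org.apache.spark.sql.parquet.row.metadata differs between the two part-files. The two JSON values are nevertheless compatible: the second schema is a subset of the first. A rough sketch of the by-name merge Spark SQL would need to perform on these schemas, runnable in spark-shell (mergeByName is an ad-hoc helper for illustration, not Spark API):

        import org.apache.spark.sql.types.{DataType, StructType}

        // Ad-hoc helper: union two compatible struct schemas by field name,
        // keeping the first occurrence of each field.
        def mergeByName(a: StructType, b: StructType): StructType =
          StructType(a.fields ++ b.fields.filterNot(f => a.fieldNames.contains(f.name)))

        // The two conflicting values from the error message above.
        val s1 = DataType.fromJson(
          """{"type":"struct","fields":[
            |  {"name":"intField","type":"integer","nullable":false,"metadata":{}},
            |  {"name":"stringField","type":"string","nullable":true,"metadata":{}}]}""".stripMargin
        ).asInstanceOf[StructType]

        val s2 = DataType.fromJson(
          """{"type":"struct","fields":[
            |  {"name":"intField","type":"integer","nullable":false,"metadata":{}}]}""".stripMargin
        ).asInstanceOf[StructType]

        // Both files can be read with the wider schema; the missing column reads as null.
        println(mergeByName(s2, s1))  // => struct containing intField and stringField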
      

          People

            Assignee: Cheng Lian
            Reporter: Cheng Lian
            Votes: 0
            Watchers: 2
