Spark / SPARK-25988

Keep names unchanged when deduplicating the column names in Analyzer


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Major
    • Resolution: Fixed
    • Affects Version/s: 2.4.0
    • Fix Version/s: 2.4.1, 3.0.0
    • Component/s: SQL
    • Labels: None

    Description

          withTempView("tmpView1", "tmpView2") {
            withTable("tab1", "tab2") {
              sql(
                """
                  |CREATE TABLE `tab1` (`col1` INT, `TDATE` DATE)
                  |USING CSV
                  |PARTITIONED BY (TDATE)
                """.stripMargin)
              spark.table("tab1").where("TDATE >= '2017-08-15'").createOrReplaceTempView("tmpView1")
              sql("CREATE TABLE `tab2` (`TDATE` DATE) USING parquet")
              sql(
                """
                  |CREATE OR REPLACE TEMPORARY VIEW tmpView2 AS
                  |SELECT N.tdate, col1 AS aliasCol1
                  |FROM tmpView1 N
                  |JOIN tab2 Z
                  |ON N.tdate = Z.tdate
                """.stripMargin)
              withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0") {
                sql("SELECT * FROM tmpView2 x JOIN tmpView2 y ON x.tdate = y.tdate").collect()
              }
            }
          }
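
      For reference, here is a standalone sketch of the same reproduction that drops the SQLTestUtils helpers (withTempView, withTable, withSQLConf); it assumes an active SparkSession bound to spark:

          // Standalone sketch; assumes an active SparkSession bound to `spark`.
          spark.sql(
            """
              |CREATE TABLE `tab1` (`col1` INT, `TDATE` DATE)
              |USING CSV
              |PARTITIONED BY (TDATE)
            """.stripMargin)
          spark.table("tab1").where("TDATE >= '2017-08-15'").createOrReplaceTempView("tmpView1")
          spark.sql("CREATE TABLE `tab2` (`TDATE` DATE) USING parquet")
          spark.sql(
            """
              |CREATE OR REPLACE TEMPORARY VIEW tmpView2 AS
              |SELECT N.tdate, col1 AS aliasCol1
              |FROM tmpView1 N
              |JOIN tab2 Z
              |ON N.tdate = Z.tdate
            """.stripMargin)
          // Same setting as SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0" in the test.
          spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "0")
          // The self-join of tmpView2 triggers the failure.
          spark.sql("SELECT * FROM tmpView2 x JOIN tmpView2 y ON x.tdate = y.tdate").collect()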
      

      Running the above code produces the following error:

      Expected only partition pruning predicates: ArrayBuffer(isnotnull(tdate#11986), (cast(tdate#11986 as string) >= 2017-08-15));
      org.apache.spark.sql.AnalysisException: Expected only partition pruning predicates: ArrayBuffer(isnotnull(tdate#11986), (cast(tdate#11986 as string) >= 2017-08-15));
      	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils$.prunePartitionsByFilter(ExternalCatalogUtils.scala:146)
      	at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.listPartitionsByFilter(InMemoryCatalog.scala:560)
      	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.listPartitionsByFilter(ExternalCatalogWithListener.scala:254)
      	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listPartitionsByFilter(SessionCatalog.scala:958)
      	at org.apache.spark.sql.execution.datasources.CatalogFileIndex.filterPartitions(CatalogFileIndex.scala:73)
      	at org.apache.spark.sql.execution.datasources.PruneFileSourcePartitions$$anonfun$apply$1.applyOrElse(PruneFileSourcePartitions.scala:63)
      	at org.apache.spark.sql.execution.datasources.PruneFileSourcePartitions$$anonfun$apply$1.applyOrElse(PruneFileSourcePartitions.scala:27)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:256)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:256)
      	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:255)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:326)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:324)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:326)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:324)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:326)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:324)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:261)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
      	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      	at org.apache.spark.sql.execution.datasources.PruneFileSourcePartitions$.apply(PruneFileSourcePartitions.scala:27)
      	at org.apache.spark.sql.execution.datasources.PruneFileSourcePartitions$.apply(PruneFileSourcePartitions.scala:26)
      	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:89)
      	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:86)
      	at scala.collection.IndexedSeqOptimized$class.foldl(IndexedSeqOptimized.scala:57)
      	at scala.collection.IndexedSeqOptimized$class.foldLeft(IndexedSeqOptimized.scala:66)
      	at scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:35)
      	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:86)
      	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:78)
      	at scala.collection.immutable.List.foreach(List.scala:392)
      	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:78)
      	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:66)
      	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:66)
      	at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:72)
      	at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:68)
      	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:77)
      	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:77)
      	at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:85)
      	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:147)
      	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:74)
      	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3248)
      	at org.apache.spark.sql.Dataset.collect(Dataset.scala:2684)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144$$anonfun$apply$mcV$sp$59$$anonfun$apply$mcV$sp$60$$anonfun$apply$mcV$sp$61.apply$mcV$sp(SQLQuerySuite.scala:2880)
      	at org.apache.spark.sql.catalyst.plans.SQLHelper$class.withSQLConf(SQLHelper.scala:47)
      	at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:38)
      	at org.apache.spark.sql.test.SQLTestUtilsBase$class.withSQLConf(SQLTestUtils.scala:181)
      	at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:38)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144$$anonfun$apply$mcV$sp$59$$anonfun$apply$mcV$sp$60.apply$mcV$sp(SQLQuerySuite.scala:2879)
      	at org.apache.spark.sql.test.SQLTestUtilsBase$class.withTable(SQLTestUtils.scala:288)
      	at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:38)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144$$anonfun$apply$mcV$sp$59.apply$mcV$sp(SQLQuerySuite.scala:2862)
      	at org.apache.spark.sql.test.SQLTestUtilsBase$class.withTempView(SQLTestUtils.scala:262)
      	at org.apache.spark.sql.SQLQuerySuite.withTempView(SQLQuerySuite.scala:38)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144.apply$mcV$sp(SQLQuerySuite.scala:2861)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144.apply(SQLQuerySuite.scala:2861)
      	at org.apache.spark.sql.SQLQuerySuite$$anonfun$144.apply(SQLQuerySuite.scala:2861)
      	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      	at org.scalatest.Transformer.apply(Transformer.scala:22)
      	at org.scalatest.Transformer.apply(Transformer.scala:20)
      	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
      	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
      	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
      	at org.apache.spark.sql.SQLQuerySuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SQLQuerySuite.scala:38)
      	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:221)
      	at org.apache.spark.sql.SQLQuerySuite.runTest(SQLQuerySuite.scala:38)
      	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
      	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
      	at scala.collection.immutable.List.foreach(List.scala:392)
      	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
      	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
      	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      	at org.scalatest.Suite$class.run(Suite.scala:1147)
      	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
      	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
      	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
      	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
      	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
      	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
      	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
      	at scala.collection.immutable.List.foreach(List.scala:392)
      	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
      	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
      	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
      	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
      	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      	at org.scalatest.tools.Runner$.run(Runner.scala:850)
      	at org.scalatest.tools.Runner.run(Runner.scala)
      	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
      	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
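
      As the issue title suggests, the root cause appears to be that when the Analyzer deduplicates the conflicting attributes introduced by the self-join of tmpView2, it not only allocates new expression IDs but also rewrites the attribute name: the partition column TDATE comes back as tdate (the lower case used in the view's "SELECT N.tdate"). When PruneFileSourcePartitions later pushes the filter down to the catalog, ExternalCatalogUtils.prunePartitionsByFilter no longer matches tdate against the partition schema by name and throws the AnalysisException above. Keeping the names unchanged during deduplication avoids the mismatch.

      A minimal sketch of the name-based check that trips here (illustrative only; ColumnRef and isPruningPredicate are made-up names, not Spark's implementation):

          object PruningSketch extends App {
            case class ColumnRef(name: String)

            // A predicate is a valid partition-pruning predicate only if every
            // column it references is a partition column, matched by exact name.
            def isPruningPredicate(refs: Seq[ColumnRef], partitionCols: Set[String]): Boolean =
              refs.forall(r => partitionCols.contains(r.name))

            // Before deduplication, the name matches the partition schema...
            println(isPruningPredicate(Seq(ColumnRef("TDATE")), Set("TDATE")))  // true
            // ...after deduplication rewrites TDATE to tdate, the check fails and
            // surfaces as "Expected only partition pruning predicates: ...".
            println(isPruningPredicate(Seq(ColumnRef("tdate")), Set("TDATE")))  // false
          }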
      


          People

            Assignee: Xiao Li (smilegator)
            Reporter: Xiao Li (smilegator)
            Votes: 0
            Watchers: 2
