Details
- Type: Sub-task
- Status: Resolved
- Priority: Major
- Resolution: Fixed
- Fix Version/s: 3.0.0
- Labels: None
Description
On Spark 3.0.0-preview2 I found a bug that is not present in 3.0.0-preview.
Given a table with the following schema:
spark.sql("select * from tmp").printSchema root |-- value: struct (nullable = true) | |-- array: array (nullable = true) | | |-- element: struct (containsNull = true) | | | |-- subarray: array (nullable = true) | | | | |-- element: struct (containsNull = true) | | | | | |-- key1: string (nullable = true) | | | | | |-- key2: string (nullable = true)
the following double lateral view explode
spark.sql("select subexplod.* from tmp lateral view explode(tmp.value.array) explod as array_explod lateral view explode(explod.array_explod.subarray) subexplod").show()
raises this error:
org.apache.spark.sql.catalyst.errors.package$TreeNodeException: Binding attribute, tree: _gen_alias_127#127
  at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:75)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:74)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:309)
  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:309)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:298)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.bindReference(BoundAttribute.scala:74)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.$anonfun$bindReferences$1(BoundAttribute.scala:96)
  at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
  at scala.collection.immutable.List.foreach(List.scala:392)
  at scala.collection.TraversableLike.map(TraversableLike.scala:238)
  at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
  at scala.collection.immutable.List.map(List.scala:298)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.bindReferences(BoundAttribute.scala:96)
  at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:65)
  at org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)
  at org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)
  at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:496)
  at org.apache.spark.sql.execution.InputRDDCodegen.doProduce(WholeStageCodegenExec.scala:483)
  at org.apache.spark.sql.execution.InputRDDCodegen.doProduce$(WholeStageCodegenExec.scala:456)
  at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:496)
  at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:212)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:209)
  at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)
  at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)
  at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:496)
  at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:51)
  at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:212)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:209)
  at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)
  at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)
  at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)
  at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:632)
  at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:692)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:174)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:212)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:209)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:170)
  at org.apache.spark.sql.execution.GenerateExec.doExecute(GenerateExec.scala:80)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:174)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:212)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:209)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:170)
  at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:525)
  at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:453)
  at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:452)
  at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:496)
  at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:47)
  at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:720)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:174)
  at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:212)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:209)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:170)
  at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:315)
  at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:433)
  at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:419)
  at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:47)
  at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3537)
  at org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:2590)
  at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3527)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:762)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3525)
  at org.apache.spark.sql.Dataset.head(Dataset.scala:2590)
  at org.apache.spark.sql.Dataset.take(Dataset.scala:2797)
  at org.apache.spark.sql.Dataset.getRows(Dataset.scala:297)
  at org.apache.spark.sql.Dataset.showString(Dataset.scala:334)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:821)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:780)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:789)
  ... 47 elided
Caused by: java.lang.RuntimeException: Couldn't find _gen_alias_127#127 in [array_explod#119]
  at scala.sys.package$.error(package.scala:30)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.$anonfun$applyOrElse$1(BoundAttribute.scala:81)
  at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
  ... 133 more
To reproduce the bug:
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType, ArrayType}
import org.apache.spark.sql.functions.{col, from_json}  // needed for col and from_json below

val structValue = StructType(StructField(name = "value", StringType, nullable = false) :: Nil)
val struct: StructType = StructType(
  StructField("array", ArrayType(StructType(
    StructField("subarray", ArrayType(StructType(
      StructField("key1", StringType, true) ::
      StructField("key2", StringType, true) :: Nil)), nullable = false) :: Nil)),
    nullable = false) :: Nil)
val value = "{\"array\": [{\"subarray\" : [{\"key1\":\"val1\",\"key2\":\"val2\"},{\"key1\":\"val11\",\"key2\":\"val12\"}]}]}"

spark.createDataFrame(spark.sparkContext.parallelize(Seq(Row(value))), structValue)
  .withColumn("value", from_json(col("value"), struct))
  .createOrReplaceTempView("tmp")

spark.sql("select subexplod.* from tmp lateral view explode(tmp.value.array) explod as array_explod lateral view explode(explod.array_explod.subarray) subexplod").show()
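For reference, the same double explode can also be written with the DataFrame API against the tmp view registered above. This is only a sketch (I have not re-verified it on this exact build; the intermediate column name array_explod just mirrors the SQL alias), but it should exercise the same generate-then-project plan:

// Sketch only: DataFrame API version of the failing double explode,
// reading the "tmp" view created in the snippet above.
import org.apache.spark.sql.functions.{col, explode}

spark.table("tmp")
  .select(explode(col("value.array")).as("array_explod"))
  .select(explode(col("array_explod.subarray")).as("subexplod"))
  .select("subexplod.*")
  .show()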
If you use just an array of arrays of strings instead, it works:
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType, ArrayType}
import org.apache.spark.sql.functions.{col, from_json}  // needed for col and from_json below

val structValue = StructType(StructField(name = "value", StringType, nullable = false) :: Nil)
val struct: StructType = StructType(
  StructField("array", ArrayType(StructType(
    StructField("subarray", ArrayType(StringType, true), nullable = false) :: Nil)),
    nullable = false) :: Nil)
val value = "{\"array\": [{\"subarray\" : [\"val1\",\"val2\"]}]}"

spark.createDataFrame(spark.sparkContext.parallelize(Seq(Row(value))), structValue)
  .withColumn("value", from_json(col("value"), struct))
  .createOrReplaceTempView("tmp")

spark.sql("select subexplod.* from tmp lateral view explode(tmp.value.array) explod as array_explod lateral view explode(explod.array_explod.subarray) subexplod").show()
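Given the linked cause (the nested-column pruning work in SPARK-26975), a possible session-level workaround, which I have not verified, is to turn nested schema pruning off before running the failing query:

// Untested workaround sketch: disable nested schema pruning for this session,
// then re-run the failing double lateral view explode.
spark.conf.set("spark.sql.optimizer.nestedSchemaPruning.enabled", "false")
spark.sql("select subexplod.* from tmp lateral view explode(tmp.value.array) explod as array_explod lateral view explode(explod.array_explod.subarray) subexplod").show()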
Attachments
Issue Links
- is caused by
  - SPARK-26975 Support nested-column pruning over limit/sample/repartition (Resolved)
- is duplicated by
  - SPARK-30855 Issue using 'explode' function followed by a (*)star expand selection of resulting struct (Resolved)
- links to