Details
- Type: Sub-task
- Status: Resolved
- Priority: Major
- Resolution: Fixed
- Fix Version/s: 3.4.0
- Component/s: None
- Labels: None
Description
https://github.com/apache/spark/runs/6912247429?check_suite_focus=true
2022-06-16T05:24:22.6921792Z [info] - SPARK-39061: inline should handle null struct *** FAILED *** (13 milliseconds)
2022-06-16T05:24:22.6927134Z [info]   java.lang.RuntimeException: Once strategy's idempotence is broken for batch Infer Filters
2022-06-16T05:24:22.6928785Z [info]   Generate inline(b#88491), [0], false, [c1#88494, c2#88495]   Generate inline(b#88491), [0], false, [c1#88494, c2#88495]
2022-06-16T05:24:22.6929841Z [info]  !+- Filter ((size(b#88491, true) > 0) AND isnotnull(b#88491))   +- Filter isnotnull(b#88491)
2022-06-16T05:24:22.6930910Z [info]  ! +- Project [b#88491]   +- Filter ((size(b#88491, true) > 0) AND isnotnull(b#88491))
2022-06-16T05:24:22.6931886Z [info]  !  +- LocalRelation [a#88490, b#88491]   +- Project [b#88491]
2022-06-16T05:24:22.6932893Z [info]  !   +- LocalRelation [a#88490, b#88491]
2022-06-16T05:24:22.6934261Z [info]   at org.apache.spark.sql.errors.QueryExecutionErrors$.onceStrategyIdempotenceIsBrokenForBatchError(QueryExecutionErrors.scala:1307)
2022-06-16T05:24:22.6935812Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.checkBatchIdempotence(RuleExecutor.scala:168)
2022-06-16T05:24:22.6937200Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:254)
2022-06-16T05:24:22.6938589Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:200)
2022-06-16T05:24:22.6939783Z [info]   at scala.collection.immutable.List.foreach(List.scala:333)
2022-06-16T05:24:22.6941076Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:200)
2022-06-16T05:24:22.6942350Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:179)
2022-06-16T05:24:22.6943591Z [info]   at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
2022-06-16T05:24:22.6944894Z [info]   at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:179)
2022-06-16T05:24:22.6946076Z [info]   at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:126)
2022-06-16T05:24:22.6947361Z [info]   at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
2022-06-16T05:24:22.6948629Z [info]   at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
2022-06-16T05:24:22.6950591Z [info]   at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:512)
2022-06-16T05:24:22.6951638Z [info]   at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
2022-06-16T05:24:22.6952587Z [info]   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
2022-06-16T05:24:22.6953562Z [info]   at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
2022-06-16T05:24:22.6954950Z [info]   at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:122)
2022-06-16T05:24:22.6956189Z [info]   at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:118)
2022-06-16T05:24:22.6957362Z [info]   at org.apache.spark.sql.QueryTest.assertEmptyMissingInput(QueryTest.scala:226)
2022-06-16T05:24:22.6958973Z [info]   at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:148)
Attachments
Issue Links
- duplicates
-
SPARK-39520 ExpressionSetSuite test failure with Scala 2.13
- Resolved