Uploaded image for project: 'Spark'
  1. Spark
  2. SPARK-44968

Downgrade ivy to 2.5.1

    XMLWordPrintableJSON

Details

    • Improvement
    • Status: Resolved
    • Major
    • Resolution: Fixed
    • 3.5.0, 4.0.0
    • 3.5.0, 4.0.0
    • Build
    • None

    Description

      After upgrading Ivy to version 2.5.2, the daily tests for Java 11 and Java 17 began to abort in the HiveExternalCatalogVersionsSuite.

      2023-08-23T23:00:49.6547573Z [info]   2023-08-23 16:00:48.209 - stdout> : java.lang.RuntimeException: problem during retrieve of org.apache.spark#spark-submit-parent-4c061f04-b951-4d06-8909-cde5452988d9: java.lang.RuntimeException: Multiple artifacts of the module log4j#log4j;1.2.17 are retrieved to the same file! Update the retrieve pattern to fix this error.
      2023-08-23T23:00:49.6548745Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:238)
      2023-08-23T23:00:49.6549572Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:89)
      2023-08-23T23:00:49.6550334Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.Ivy.retrieve(Ivy.java:551)
      2023-08-23T23:00:49.6551079Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.deploy.SparkSubmitUtils$.resolveMavenCoordinates(SparkSubmit.scala:1464)
      2023-08-23T23:00:49.6552024Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.$anonfun$downloadVersion$2(IsolatedClientLoader.scala:138)
      2023-08-23T23:00:49.6552884Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
      2023-08-23T23:00:49.6553755Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.downloadVersion(IsolatedClientLoader.scala:138)
      2023-08-23T23:00:49.6554705Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.liftedTree1$1(IsolatedClientLoader.scala:65)
      2023-08-23T23:00:49.6555637Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.forVersion(IsolatedClientLoader.scala:64)
      2023-08-23T23:00:49.6556554Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:443)
      2023-08-23T23:00:49.6557340Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:356)
      2023-08-23T23:00:49.6558187Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:71)
      2023-08-23T23:00:49.6559061Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:70)
      2023-08-23T23:00:49.6559962Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:224)
      2023-08-23T23:00:49.6560766Z [info]   2023-08-23 16:00:48.209 - stdout>     at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
      2023-08-23T23:00:49.6561584Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:102)
      2023-08-23T23:00:49.6562510Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:224)
      2023-08-23T23:00:49.6563435Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)
      2023-08-23T23:00:49.6564323Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)
      2023-08-23T23:00:49.6565340Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:45)
      2023-08-23T23:00:49.6566321Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:60)
      2023-08-23T23:00:49.6567363Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:118)
      2023-08-23T23:00:49.6568372Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:118)
      2023-08-23T23:00:49.6569393Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.tableExists(SessionCatalog.scala:490)
      2023-08-23T23:00:49.6570685Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:155)
      2023-08-23T23:00:49.6571842Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
      2023-08-23T23:00:49.6572932Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
      2023-08-23T23:00:49.6573996Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
      2023-08-23T23:00:49.6575045Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)
      2023-08-23T23:00:49.6576066Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
      2023-08-23T23:00:49.6576937Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
      2023-08-23T23:00:49.6577807Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
      2023-08-23T23:00:49.6578620Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
      2023-08-23T23:00:49.6579432Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
      2023-08-23T23:00:49.6580357Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)
      2023-08-23T23:00:49.6581331Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)
      2023-08-23T23:00:49.6582239Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
      2023-08-23T23:00:49.6583101Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
      2023-08-23T23:00:49.6584088Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
      2023-08-23T23:00:49.6585236Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
      2023-08-23T23:00:49.6586519Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
      2023-08-23T23:00:49.6587686Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
      2023-08-23T23:00:49.6588898Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
      2023-08-23T23:00:49.6590014Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
      2023-08-23T23:00:49.6590993Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
      2023-08-23T23:00:49.6591930Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93)
      2023-08-23T23:00:49.6592914Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80)
      2023-08-23T23:00:49.6593856Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78)
      2023-08-23T23:00:49.6594687Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
      2023-08-23T23:00:49.6595379Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
      2023-08-23T23:00:49.6596103Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
      2023-08-23T23:00:49.6596807Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
      2023-08-23T23:00:49.6597520Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:618)
      2023-08-23T23:00:49.6598276Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
      2023-08-23T23:00:49.6599022Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
      2023-08-23T23:00:49.6599819Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      2023-08-23T23:00:49.6600723Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
      2023-08-23T23:00:49.6601707Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      2023-08-23T23:00:49.6602513Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/java.lang.reflect.Method.invoke(Method.java:568)
      2023-08-23T23:00:49.6603272Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
      2023-08-23T23:00:49.6604007Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
      2023-08-23T23:00:49.6604724Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.Gateway.invoke(Gateway.java:282)
      2023-08-23T23:00:49.6605416Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
      2023-08-23T23:00:49.6606209Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.commands.CallCommand.execute(CallCommand.java:79)
      2023-08-23T23:00:49.6606969Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
      2023-08-23T23:00:49.6607743Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
      2023-08-23T23:00:49.6608415Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/java.lang.Thread.run(Thread.java:833)
      2023-08-23T23:00:49.6609288Z [info]   2023-08-23 16:00:48.209 - stdout> Caused by: java.lang.RuntimeException: Multiple artifacts of the module log4j#log4j;1.2.17 are retrieved to the same file! Update the retrieve pattern to fix this error.
      2023-08-23T23:00:49.6610288Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.determineArtifactsToCopy(RetrieveEngine.java:426)
      2023-08-23T23:00:49.6611332Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:122)
      2023-08-23T23:00:49.6612046Z [info]   2023-08-23 16:00:48.209 - stdout>     ... 66 more
      2023-08-23T23:00:49.6612498Z [info]   2023-08-23 16:00:48.209 - stdout>  

      Java 11
       

      Attachments

        Issue Links

          Activity

            People

              LuciferYang Yang Jie
              LuciferYang Yang Jie
              Votes:
              0 Vote for this issue
              Watchers:
              1 Start watching this issue

              Dates

                Created:
                Updated:
                Resolved: