Spark / SPARK-43898

TPCDSQueryBenchmark run failed on GitHub Actions using Scala 2.13


Details

    • Type: Improvement
    • Status: Resolved
    • Priority: Major
    • Resolution: Fixed
    • Affects Version/s: 3.5.0
    • Fix Version/s: 3.5.0
    • Component/s: SQL, Tests
    • Labels: None

    Description

      https://github.com/LuciferYang/spark/actions/runs/5129288422/jobs/9226895410

       

23/05/31 06:15:26 ERROR Utils: Exception encountered
java.lang.IllegalArgumentException: Class is not registered: scala.collection.immutable.ArraySeq$ofRef
Note: To register this class use: kryo.register(scala.collection.immutable.ArraySeq$ofRef.class);
	at com.esotericsoftware.kryo.Kryo.getRegistration(Kryo.java:503)
	at com.esotericsoftware.kryo.util.DefaultClassResolver.writeClass(DefaultClassResolver.java:97)
	at com.esotericsoftware.kryo.Kryo.writeClass(Kryo.java:540)
	at com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:645)
	at org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$2(ParallelCollectionRDD.scala:64)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$2$adapted(ParallelCollectionRDD.scala:64)
	at org.apache.spark.util.Utils$.serializeViaNestedStream(Utils.scala:172)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$1(ParallelCollectionRDD.scala:64)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1453)
	at org.apache.spark.rdd.ParallelCollectionPartition.writeObject(ParallelCollectionRDD.scala:50)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:1154)
	at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1496)
	at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
	at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
	at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
	at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
	at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
	at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
	at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
	at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
	at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:115)
	at org.apache.spark.scheduler.TaskSetManager.prepareLaunchingTask(TaskSetManager.scala:535)
	at org.apache.spark.scheduler.TaskSetManager.$anonfun$resourceOffer$2(TaskSetManager.scala:499)
	at scala.Option.map(Option.scala:242)
	at org.apache.spark.scheduler.TaskSetManager.resourceOffer(TaskSetManager.scala:475)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$2(TaskSchedulerImpl.scala:414)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$2$adapted(TaskSchedulerImpl.scala:409)
	at scala.Option.foreach(Option.scala:437)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$1(TaskSchedulerImpl.scala:409)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:190)
	at org.apache.spark.scheduler.TaskSchedulerImpl.resourceOfferSingleTaskSet(TaskSchedulerImpl.scala:399)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$20(TaskSchedulerImpl.scala:606)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$20$adapted(TaskSchedulerImpl.scala:601)
	at scala.collection.ArrayOps$.foreach$extension(ArrayOps.scala:1328)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$16(TaskSchedulerImpl.scala:601)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$16$adapted(TaskSchedulerImpl.scala:574)
	at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:563)
	at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:561)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:926)
	at org.apache.spark.scheduler.TaskSchedulerImpl.resourceOffers(TaskSchedulerImpl.scala:574)
	at org.apache.spark.scheduler.local.LocalEndpoint.reviveOffers(LocalSchedulerBackend.scala:91)
	at org.apache.spark.scheduler.local.LocalEndpoint$$anonfun$receive$1.applyOrElse(LocalSchedulerBackend.scala:68)
	at org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:115)
	at org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)
	at org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)
	at org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)
	at org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:750)
23/05/31 06:15:26 ERROR TaskSetManager: Failed to serialize task 741, not attempting to retry it.
java.io.IOException: java.lang.IllegalArgumentException: Class is not registered: scala.collection.immutable.ArraySeq$ofRef
Note: To register this class use: kryo.register(scala.collection.immutable.ArraySeq$ofRef.class);
	at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1460)
	at org.apache.spark.rdd.ParallelCollectionPartition.writeObject(ParallelCollectionRDD.scala:50)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:1154)
	at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1496)
	at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
	at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
	at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
	at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
	at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
	at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
	at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
	at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
	at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:115)
	at org.apache.spark.scheduler.TaskSetManager.prepareLaunchingTask(TaskSetManager.scala:535)
	at org.apache.spark.scheduler.TaskSetManager.$anonfun$resourceOffer$2(TaskSetManager.scala:499)
	at scala.Option.map(Option.scala:242)
	at org.apache.spark.scheduler.TaskSetManager.resourceOffer(TaskSetManager.scala:475)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$2(TaskSchedulerImpl.scala:414)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$2$adapted(TaskSchedulerImpl.scala:409)
	at scala.Option.foreach(Option.scala:437)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOfferSingleTaskSet$1(TaskSchedulerImpl.scala:409)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:190)
	at org.apache.spark.scheduler.TaskSchedulerImpl.resourceOfferSingleTaskSet(TaskSchedulerImpl.scala:399)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$20(TaskSchedulerImpl.scala:606)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$20$adapted(TaskSchedulerImpl.scala:601)
	at scala.collection.ArrayOps$.foreach$extension(ArrayOps.scala:1328)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$16(TaskSchedulerImpl.scala:601)
	at org.apache.spark.scheduler.TaskSchedulerImpl.$anonfun$resourceOffers$16$adapted(TaskSchedulerImpl.scala:574)
	at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:563)
	at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:561)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:926)
	at org.apache.spark.scheduler.TaskSchedulerImpl.resourceOffers(TaskSchedulerImpl.scala:574)
	at org.apache.spark.scheduler.local.LocalEndpoint.reviveOffers(LocalSchedulerBackend.scala:91)
	at org.apache.spark.scheduler.local.LocalEndpoint$$anonfun$receive$1.applyOrElse(LocalSchedulerBackend.scala:68)
	at org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:115)
	at org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)
	at org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)
	at org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)
	at org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:750)
Caused by: java.lang.IllegalArgumentException: Class is not registered: scala.collection.immutable.ArraySeq$ofRef
Note: To register this class use: kryo.register(scala.collection.immutable.ArraySeq$ofRef.class);
	at com.esotericsoftware.kryo.Kryo.getRegistration(Kryo.java:503)
	at com.esotericsoftware.kryo.util.DefaultClassResolver.writeClass(DefaultClassResolver.java:97)
	at com.esotericsoftware.kryo.Kryo.writeClass(Kryo.java:540)
	at com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:645)
	at org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$2(ParallelCollectionRDD.scala:64)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$2$adapted(ParallelCollectionRDD.scala:64)
	at org.apache.spark.util.Utils$.serializeViaNestedStream(Utils.scala:172)
	at org.apache.spark.rdd.ParallelCollectionPartition.$anonfun$writeObject$1(ParallelCollectionRDD.scala:64)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1453)
	... 45 more
23/05/31 06:15:26 ERROR TaskSchedulerImpl: Resource offer failed, task set TaskSet_1060.0 was not serializable
23/05/31 06:15:26 ERROR OverwriteByExpressionExec: Data source write support org.apache.spark.sql.execution.datasources.noop.NoopBatchWrite$@45e24be0 is aborting.
23/05/31 06:15:26 ERROR OverwriteByExpressionExec: Data source write support org.apache.spark.sql.execution.datasources.noop.NoopBatchWrite$@45e24be0 aborted.
Error: Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Failed to serialize task 741, not attempting to retry it. Exception during serialization: java.io.IOException: java.lang.IllegalArgumentException: Class is not registered: scala.collection.immutable.ArraySeq$ofRef
Note: To register this class use: kryo.register(scala.collection.immutable.ArraySeq$ofRef.class);
	at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2815)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2751)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2750)
	at scala.collection.immutable.List.foreach(List.scala:333)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2750)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1218)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1218)
	at scala.Option.foreach(Option.scala:437)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1218)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3014)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2953)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2942)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:983)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2285)
	at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:385)
	at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:359)
	at org.apache.spark.sql.execution.datasources.v2.OverwriteByExpressionExec.writeWithV2(WriteToDataSourceV2Exec.scala:243)
	at org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run(WriteToDataSourceV2Exec.scala:337)
	at org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run$(WriteToDataSourceV2Exec.scala:336)
	at org.apache.spark.sql.execution.datasources.v2.OverwriteByExpressionExec.run(WriteToDataSourceV2Exec.scala:243)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:118)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:195)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:103)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:825)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488)
	at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
	at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:133)
	at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:858)
	at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:318)
	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:247)
	at org.apache.spark.sql.execution.benchmark.SqlBasedBenchmark$DatasetToBenchmark.noop(SqlBasedBenchmark.scala:70)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark$.$anonfun$runTpcdsQueries$5(TPCDSQueryBenchmark.scala:111)
	at org.apache.spark.benchmark.Benchmark.$anonfun$addCase$1(Benchmark.scala:77)
	at org.apache.spark.benchmark.Benchmark.$anonfun$addCase$1$adapted(Benchmark.scala:75)
	at org.apache.spark.benchmark.Benchmark.measure(Benchmark.scala:140)
	at org.apache.spark.benchmark.Benchmark.$anonfun$run$1(Benchmark.scala:106)
	at scala.collection.StrictOptimizedIterableOps.map(StrictOptimizedIterableOps.scala:100)
	at scala.collection.StrictOptimizedIterableOps.map$(StrictOptimizedIterableOps.scala:87)
	at scala.collection.mutable.ArrayBuffer.map(ArrayBuffer.scala:43)
	at org.apache.spark.benchmark.Benchmark.run(Benchmark.scala:104)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark$.$anonfun$runTpcdsQueries$1(TPCDSQueryBenchmark.scala:113)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark$.$anonfun$runTpcdsQueries$1$adapted(TPCDSQueryBenchmark.scala:91)
	at scala.collection.immutable.List.foreach(List.scala:333)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark$.runTpcdsQueries(TPCDSQueryBenchmark.scala:91)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark$.runBenchmarkSuite(TPCDSQueryBenchmark.scala:185)
	at org.apache.spark.benchmark.BenchmarkBase.main(BenchmarkBase.scala:72)
	at org.apache.spark.sql.execution.benchmark.TPCDSQueryBenchmark.main(TPCDSQueryBenchmark.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.benchmark.Benchmarks$.$anonfun$main$7(Benchmarks.scala:128)
	at scala.collection.ArrayOps$.foreach$extension(ArrayOps.scala:1328)
	at org.apache.spark.benchmark.Benchmarks$.main(Benchmarks.scala:91)
	at org.apache.spark.benchmark.Benchmarks.main(Benchmarks.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1025)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:192)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:215)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1116)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1125)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Error: Process completed with exit code 1.
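
The failure shows up only with Scala 2.13, most likely because 2.13 wraps reference arrays in scala.collection.immutable.ArraySeq$ofRef (Scala 2.12 used scala.collection.mutable.WrappedArray$ofRef instead), and that wrapper class is not covered by the Kryo registrations in effect when spark.kryo.registrationRequired is enabled. A small illustrative snippet (not taken from the benchmark code) showing where the class comes from:

{code:scala}
// Scala 2.13: wrapping a reference array produces ArraySeq$ofRef, the exact
// class that Kryo reports as unregistered in the log above.
val wrapped = scala.collection.immutable.ArraySeq.unsafeWrapArray(Array[AnyRef]("a", "b"))
println(wrapped.getClass.getName) // scala.collection.immutable.ArraySeq$ofRef
{code}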
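The Kryo message already points at the workaround it expects: register the missing class. Below is a minimal sketch of doing that from user or benchmark code via Spark's KryoRegistrator hook; the registrator class name is hypothetical, and the eventual fix for this ticket may instead add the registration inside Spark's built-in KryoSerializer.

{code:scala}
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoRegistrator

// Hypothetical registrator that registers the Scala 2.13 array wrapper Kryo rejects.
class ArraySeqKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(Class.forName("scala.collection.immutable.ArraySeq$ofRef"))
  }
}

// Wiring it into a SparkConf that requires Kryo registration, as this run does:
val conf = new SparkConf()
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryo.registrationRequired", "true")
  .set("spark.kryo.registrator", classOf[ArraySeqKryoRegistrator].getName)
{code}

Alternatively, the class name could be appended to spark.kryo.classesToRegister without any code change.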


          People

              Assignee: Yang Jie (LuciferYang)
              Reporter: Yang Jie (LuciferYang)
              Votes: 0
              Watchers: 2

