Details
-
Type: Bug
-
Status: Closed
-
Priority: Minor
-
Resolution: Fixed
-
None
-
None
-
None
Description
java.lang.IllegalStateException: Error to load /model_desc/ci_inner_join_model.json
at org.apache.kylin.metadata.MetadataManager.reloadDataModelDescAt(MetadataManager.java:633)
at org.apache.kylin.metadata.MetadataManager.reloadAllDataModel(MetadataManager.java:611)
at org.apache.kylin.metadata.MetadataManager.init(MetadataManager.java:323)
at org.apache.kylin.metadata.MetadataManager.<init>(MetadataManager.java:136)
at org.apache.kylin.metadata.MetadataManager.getInstance(MetadataManager.java:94)
at org.apache.kylin.cube.model.CubeDesc.init(CubeDesc.java:589)
at org.apache.kylin.cube.CubeDescManager.loadCubeDesc(CubeDescManager.java:196)
at org.apache.kylin.cube.CubeDescManager.reloadAllCubeDesc(CubeDescManager.java:321)
at org.apache.kylin.cube.CubeDescManager.<init>(CubeDescManager.java:114)
at org.apache.kylin.cube.CubeDescManager.getInstance(CubeDescManager.java:81)
at org.apache.kylin.cube.CubeManager.reloadCubeLocalAt(CubeManager.java:809)
at org.apache.kylin.cube.CubeManager.loadAllCubeInstance(CubeManager.java:787)
at org.apache.kylin.cube.CubeManager.<init>(CubeManager.java:147)
at org.apache.kylin.cube.CubeManager.getInstance(CubeManager.java:105)
at org.apache.kylin.engine.spark.SparkCubingByLayer$EncodeBaseCuboid.call(SparkCubingByLayer.java:295)
at org.apache.kylin.engine.spark.SparkCubingByLayer$EncodeBaseCuboid.call(SparkCubingByLayer.java:276)
at org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:1043)
at org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:1043)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:193)
at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:63)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)