Details
-
Bug
-
Status: Resolved
-
Blocker
-
Resolution: Workaround
-
v2.6.3
-
None
-
Hortonworks HDP 3.0.1.0-187 VirtualBox image
Description
Hello!
When I try to run the integration tests, I get the following error:
Command:
mvn verify -fae -Dhdp.version=3.0.1.0-187 -P sandbox
Error message:
17:06:08.511 [Scheduler 727225988 Job aaebe71d-67ba-43e8-8d9e-eb7aa98b49e1-392] ERROR org.apache.kylin.engine.mr.common.HadoopShellExecutable - error execute HadoopShellExecutable{id=aaebe71d-67ba-43e8-8d9e-eb7aa98b49e1-03, name=Build Dimension Dictionary, state=RUNNING}
java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.hive.serde2.io.ShortWritable
at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector.getPrimitiveJavaObject(WritableShortObjectInspector.java:46) ~[hive-serde-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializePrimitiveField(HCatRecordSerDe.java:278) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializeField(HCatRecordSerDe.java:199) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:53) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:97) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.mapreduce.HCatRecordReader.nextKeyValue(HCatRecordReader.java:204) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.hive.hcatalog.data.transfer.impl.HCatInputFormatReader$HCatRecordItr.hasNext(HCatInputFormatReader.java:107) ~[hive-hcatalog-core-3.1.0.jar:3.1.0]
at org.apache.kylin.source.hive.HiveTableReader.next(HiveTableReader.java:90) ~[kylin-source-hive-2.6.3.jar:2.6.3]
at org.apache.kylin.dict.lookup.SnapshotTable.takeSnapshot(SnapshotTable.java:98) ~[kylin-core-dictionary-2.6.3.jar:2.6.3]
at org.apache.kylin.dict.lookup.SnapshotManager.buildSnapshot(SnapshotManager.java:137) ~[kylin-core-dictionary-2.6.3.jar:2.6.3]
at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1202) ~[kylin-core-cube-2.6.3.jar:2.6.3]
at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1117) ~[kylin-core-cube-2.6.3.jar:2.6.3]
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:123) ~[kylin-core-cube-2.6.3.jar:2.6.3]
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:69) ~[kylin-core-cube-2.6.3.jar:2.6.3]
at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:73) ~[kylin-engine-mr-2.6.3.jar:2.6.3]
at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:93) ~[kylin-engine-mr-2.6.3.jar:2.6.3]
at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63) [kylin-engine-mr-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167) [kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71) [kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167) [kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114) [kylin-core-job-2.6.3.jar:2.6.3]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_172]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_172]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_172]
17:06:08.539 [Scheduler 727225988 Job aaebe71d-67ba-43e8-8d9e-eb7aa98b49e1-392] ERROR org.apache.kylin.job.execution.AbstractExecutable - error running Executable: CubingJob{id=aaebe71d-67ba-43e8-8d9e-eb7aa98b49e1, name=BUILD CUBE - ci_inner_join_cube - 19700101000000_20120601000000 - GMT+08:00 2019-08-02 22:01:57, state=RUNNING}
17:06:08.572 [pool-12-thread-4] ERROR org.apache.kylin.job.impl.threadpool.DefaultScheduler - ExecuteException job:aaebe71d-67ba-43e8-8d9e-eb7aa98b49e1
org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.hive.serde2.io.ShortWritable
at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector.getPrimitiveJavaObject(WritableShortObjectInspector.java:46)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializePrimitiveField(HCatRecordSerDe.java:278)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializeField(HCatRecordSerDe.java:199)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:53)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:97)
at org.apache.hive.hcatalog.mapreduce.HCatRecordReader.nextKeyValue(HCatRecordReader.java:204)
at org.apache.hive.hcatalog.data.transfer.impl.HCatInputFormatReader$HCatRecordItr.hasNext(HCatInputFormatReader.java:107)
at org.apache.kylin.source.hive.HiveTableReader.next(HiveTableReader.java:90)
at org.apache.kylin.dict.lookup.SnapshotTable.takeSnapshot(SnapshotTable.java:98)
at org.apache.kylin.dict.lookup.SnapshotManager.buildSnapshot(SnapshotManager.java:137)
at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1202)
at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1117)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:123)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:69)
at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:73)
at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:93)
at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
result code:2
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:182) ~[kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114) [kylin-core-job-2.6.3.jar:2.6.3]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_172]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_172]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_172]
Caused by: org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.hive.serde2.io.ShortWritable
at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector.getPrimitiveJavaObject(WritableShortObjectInspector.java:46)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializePrimitiveField(HCatRecordSerDe.java:278)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializeField(HCatRecordSerDe.java:199)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:53)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:97)
at org.apache.hive.hcatalog.mapreduce.HCatRecordReader.nextKeyValue(HCatRecordReader.java:204)
at org.apache.hive.hcatalog.data.transfer.impl.HCatInputFormatReader$HCatRecordItr.hasNext(HCatInputFormatReader.java:107)
at org.apache.kylin.source.hive.HiveTableReader.next(HiveTableReader.java:90)
at org.apache.kylin.dict.lookup.SnapshotTable.takeSnapshot(SnapshotTable.java:98)
at org.apache.kylin.dict.lookup.SnapshotManager.buildSnapshot(SnapshotManager.java:137)
at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1202)
at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1117)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:123)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:69)
at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:73)
at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:93)
at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
result code:2
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:182) ~[kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71) ~[kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167) ~[kylin-core-job-2.6.3.jar:2.6.3]
... 4 more
Caused by: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.hive.serde2.io.ShortWritable
at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector.getPrimitiveJavaObject(WritableShortObjectInspector.java:46)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializePrimitiveField(HCatRecordSerDe.java:278)
at org.apache.hive.hcatalog.data.HCatRecordSerDe.serializeField(HCatRecordSerDe.java:199)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:53)
at org.apache.hive.hcatalog.data.LazyHCatRecord.get(LazyHCatRecord.java:97)
at org.apache.hive.hcatalog.mapreduce.HCatRecordReader.nextKeyValue(HCatRecordReader.java:204)
at org.apache.hive.hcatalog.data.transfer.impl.HCatInputFormatReader$HCatRecordItr.hasNext(HCatInputFormatReader.java:107)
at org.apache.kylin.source.hive.HiveTableReader.next(HiveTableReader.java:90)
at org.apache.kylin.dict.lookup.SnapshotTable.takeSnapshot(SnapshotTable.java:98)
at org.apache.kylin.dict.lookup.SnapshotManager.buildSnapshot(SnapshotManager.java:137)
at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1202)
at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1117)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:123)
at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:69)
at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:73)
at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:93)
at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71)
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167)
at org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
result code:2
at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:73) ~[kylin-engine-mr-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167) ~[kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71) ~[kylin-core-job-2.6.3.jar:2.6.3]
at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:167) ~[kylin-core-job-2.6.3.jar:2.6.3]
... 4 more
17:06:09.939 [main] ERROR org.apache.kylin.provision.BuildCubeWithEngine - error
java.lang.RuntimeException: The test 'testInnerJoinCube' is failed.
at org.apache.kylin.provision.BuildCubeWithEngine.runTestAndAssertSucceed(BuildCubeWithEngine.java:250) [test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.testCase(BuildCubeWithEngine.java:234) [test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.build(BuildCubeWithEngine.java:211) [test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.main(BuildCubeWithEngine.java:100) [test-classes/:?]
17:06:09.940 [main] ERROR org.apache.kylin.provision.BuildCubeWithEngine - error
java.lang.RuntimeException: The test 'testInnerJoinCube' is failed.
at org.apache.kylin.provision.BuildCubeWithEngine.runTestAndAssertSucceed(BuildCubeWithEngine.java:250) ~[test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.testCase(BuildCubeWithEngine.java:234) ~[test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.build(BuildCubeWithEngine.java:211) ~[test-classes/:?]
at org.apache.kylin.provision.BuildCubeWithEngine.main(BuildCubeWithEngine.java:100) [test-classes/:?]
Could you please share some suggestions on how to resolve this?
Thanks!