Create and distribute table, cmd: hive -e "USE default; DROP TABLE IF EXISTS kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566_fact; CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566_fact ( _DEVICE_NAME string ,YEAR_START date ,QUARTER_START date ,MONTH_START date ,WEEK_START date ,DAY_START date ,HOUR_START timestamp ,MINUTE_START timestamp ,VOL int ,CUR int ) STORED AS SEQUENCEFILE LOCATION 'hdfs://kylin.beacon.com:8020/kylin/kylin_metadata03/kylin-de51a851-b8f7-40be-939b-c4be0ef2ee2d/kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566_fact'; ALTER TABLE kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566_fact SET TBLPROPERTIES('auto.purge'='true'); DROP TABLE IF EXISTS kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566; CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566 ( KEYLINDATA__DEVICE_NAME string ,KEYLINDATA_YEAR_START date ,KEYLINDATA_QUARTER_START date ,KEYLINDATA_MONTH_START date ,KEYLINDATA_WEEK_START date ,KEYLINDATA_DAY_START date ,KEYLINDATA_HOUR_START timestamp ,KEYLINDATA_MINUTE_START timestamp ,IOT_GW_PUBLIC_GW_NAME string ,IOT_GW_PUBLIC_POSITION_ID string ,IOT_GW_PUBLIC_OWNER string ,KEYLINDATA_VOL int ,KEYLINDATA_CUR int ) STORED AS SEQUENCEFILE LOCATION 'hdfs://kylin.beacon.com:8020/kylin/kylin_metadata03/kylin-de51a851-b8f7-40be-939b-c4be0ef2ee2d/kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566'; ALTER TABLE kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566 SET TBLPROPERTIES('auto.purge'='true'); INSERT OVERWRITE TABLE kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566 SELECT KEYLINDATA._DEVICE_NAME as KEYLINDATA__DEVICE_NAME ,KEYLINDATA.YEAR_START as KEYLINDATA_YEAR_START ,KEYLINDATA.QUARTER_START as KEYLINDATA_QUARTER_START ,KEYLINDATA.MONTH_START as KEYLINDATA_MONTH_START ,KEYLINDATA.WEEK_START as KEYLINDATA_WEEK_START ,KEYLINDATA.DAY_START as KEYLINDATA_DAY_START ,KEYLINDATA.HOUR_START as KEYLINDATA_HOUR_START ,KEYLINDATA.MINUTE_START as KEYLINDATA_MINUTE_START ,IOT_GW_PUBLIC.GW_NAME as IOT_GW_PUBLIC_GW_NAME ,IOT_GW_PUBLIC.POSITION_ID as IOT_GW_PUBLIC_POSITION_ID ,IOT_GW_PUBLIC.OWNER as IOT_GW_PUBLIC_OWNER ,KEYLINDATA.VOL as KEYLINDATA_VOL ,KEYLINDATA.CUR as KEYLINDATA_CUR FROM kylin_intermediate_kafka_test_169e762f_9c08_48c0_bfe1_63e301c1e566_fact as KEYLINDATA INNER JOIN DB_3.IOT_GW_PUBLIC as IOT_GW_PUBLIC ON KEYLINDATA._DEVICE_NAME = IOT_GW_PUBLIC.GW_NAME WHERE 1=1; " --hiveconf hive.merge.mapredfiles=false --hiveconf hive.merge.mapfiles=false --hiveconf hive.stats.autogather=true --hiveconf hive.auto.convert.join.noconditionaltask=true --hiveconf dfs.replication=2 --hiveconf hive.auto.convert.join.noconditionaltask.size=100000000 --hiveconf hive.auto.convert.join=true --hiveconf hive.exec.compress.output=true --hiveconf mapreduce.job.split.metainfo.maxsize=-1 SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/opt/cloudera/parcels/CDH-5.15.0-1.cdh5.15.0.p0.21/jars/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/opt/apache-kylin-2.4.0-bin-cdh57/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] Logging initialized using configuration in jar:file:/opt/cloudera/parcels/CDH-5.15.0-1.cdh5.15.0.p0.21/jars/hive-common-1.1.0-cdh5.15.0.jar!/hive-log4j.properties OK Time taken: 2.271 seconds OK Time taken: 0.096 seconds NoViableAltException(13@[]) at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.identifier(HiveParser_IdentifiersParser.java:11567) at org.apache.hadoop.hive.ql.parse.HiveParser.identifier(HiveParser.java:45065) at org.apache.hadoop.hive.ql.parse.HiveParser.columnNameType(HiveParser.java:38697) at org.apache.hadoop.hive.ql.parse.HiveParser.columnNameTypeList(HiveParser.java:36899) at org.apache.hadoop.hive.ql.parse.HiveParser.createTableStatement(HiveParser.java:5058) at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2558) at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1590) at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1066) at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:201) at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166) at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:524) at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1358) at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1475) at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1287) at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1277) at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:226) at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:175) at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:389) at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:324) at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:726) at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:699) at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:634) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.hadoop.util.RunJar.run(RunJar.java:221) at org.apache.hadoop.util.RunJar.main(RunJar.java:136) FAILED: ParseException line 4:0 cannot recognize input near '_DEVICE_NAME' 'string' ',' in column specification WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked. WARN: Please see http://www.slf4j.org/codes.html#release for an explanation.