diff --git checkstyle/checkstyle.xml checkstyle/checkstyle.xml
index 12e166311b..756b997cff 100644
--- checkstyle/checkstyle.xml
+++ checkstyle/checkstyle.xml
@@ -163,8 +163,12 @@
+
+
+
+
+
-
diff --git data/files/datasets/druid_table_alltypesorc/load.hive.sql data/files/datasets/druid_table_alltypesorc/load.hive.sql
index 5fde266a01..189c0aed25 100644
--- data/files/datasets/druid_table_alltypesorc/load.hive.sql
+++ data/files/datasets/druid_table_alltypesorc/load.hive.sql
@@ -18,7 +18,7 @@ OVERWRITE INTO TABLE alltypesorc1;
CREATE EXTERNAL TABLE druid_table_alltypesorc
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
+TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "SECOND")
AS
SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
cstring1,
@@ -32,5 +32,3 @@ SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
cboolean1,
cboolean2
FROM alltypesorc1 where ctimestamp1 IS NOT NULL;
-
-DROP TABLE alltypesorc1;
diff --git data/scripts/q_test_cleanup.sql data/scripts/q_test_cleanup.sql
index 1c59381aa0..731062efec 100644
--- data/scripts/q_test_cleanup.sql
+++ data/scripts/q_test_cleanup.sql
@@ -24,3 +24,4 @@ DROP TABLE IF EXISTS cbo_t3;
DROP TABLE IF EXISTS src_cbo;
DROP TABLE IF EXISTS part;
DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS alltypesorc1;
diff --git druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
index e0e29a3c6d..c5dc1e8eba 100644
--- druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
+++ druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
@@ -134,9 +134,7 @@ static KafkaSupervisorSpec createKafkaSupervisorSpec(Table table,
builder.put(KafkaSupervisorIOConfig.BOOTSTRAP_SERVERS_KEY, kafkaServers);
    for (Map.Entry<String, String> entry : table.getParameters().entrySet()) {
if (entry.getKey().startsWith(DruidConstants.DRUID_KAFKA_CONSUMER_PROPERTY_PREFIX)) {
- String
- propertyName =
- entry.getKey().substring(DruidConstants.DRUID_KAFKA_CONSUMER_PROPERTY_PREFIX.length());
+ String propertyName = entry.getKey().substring(DruidConstants.DRUID_KAFKA_CONSUMER_PROPERTY_PREFIX.length());
builder.put(propertyName, entry.getValue());
}
}
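
A minimal standalone sketch of the consumer-property mapping in the hunk above: any table property carrying the Kafka consumer prefix is forwarded to the Kafka consumer config with the prefix stripped. The prefix string and the sample property names below are assumptions for illustration; only the substring logic mirrors the patch.

import java.util.HashMap;
import java.util.Map;

public class ConsumerPropertyDemo {
  // Assumed to match DruidConstants.DRUID_KAFKA_CONSUMER_PROPERTY_PREFIX.
  private static final String PREFIX = "druid.kafka.consumer.";

  public static void main(String[] args) {
    Map<String, String> tableProps = new HashMap<>();
    tableProps.put("druid.kafka.consumer.security.protocol", "SSL"); // hypothetical table property
    tableProps.put("druid.query.granularity", "SECOND");             // no prefix, so it is ignored

    Map<String, String> consumerProps = new HashMap<>();
    for (Map.Entry<String, String> entry : tableProps.entrySet()) {
      if (entry.getKey().startsWith(PREFIX)) {
        // Same substring call as the patch: drop the prefix so the remainder
        // is a plain Kafka consumer configuration key.
        consumerProps.put(entry.getKey().substring(PREFIX.length()), entry.getValue());
      }
    }
    System.out.println(consumerProps); // prints {security.protocol=SSL}
  }
}

Namespacing consumer settings under a table-property prefix keeps the Kafka client configuration separate from the Druid and Hive properties that share the same parameter map.
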
@@ -178,61 +176,47 @@ static boolean isKafkaStreamingTable(Table table) {
return DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.KAFKA_TOPIC) != null;
}
- static InputRowParser getInputRowParser(Table table,
- TimestampSpec timestampSpec,
- DimensionsSpec dimensionsSpec
- ) {
+ static InputRowParser getInputRowParser(Table table, TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec) {
String parseSpecFormat = DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_FORMAT);
// Default case JSON
- if(parseSpecFormat == null || parseSpecFormat.equalsIgnoreCase("json")) {
- return new StringInputRowParser(
- new JSONParseSpec(timestampSpec,
- dimensionsSpec,
- null,
- null
- ), "UTF-8");
- } else if(parseSpecFormat.equalsIgnoreCase("csv")){
- return new StringInputRowParser(
- new CSVParseSpec(
- timestampSpec,
- dimensionsSpec,
- DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_LIST_DELIMITER),
- DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
- DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
- DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)
- ), "UTF-8");
- } else if (parseSpecFormat.equalsIgnoreCase("delimited")){
- return new StringInputRowParser(
- new DelimitedParseSpec(
- timestampSpec,
- dimensionsSpec,
- DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_DELIMITER),
- DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_LIST_DELIMITER),
- DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
- DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
- DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)
- ), "UTF-8");
- } else if(parseSpecFormat.equalsIgnoreCase("avro")) {
+ if ((parseSpecFormat == null) || "json".equalsIgnoreCase(parseSpecFormat)) {
+ return new StringInputRowParser(new JSONParseSpec(timestampSpec, dimensionsSpec, null, null), "UTF-8");
+ } else if ("csv".equalsIgnoreCase(parseSpecFormat)) {
+ return new StringInputRowParser(new CSVParseSpec(timestampSpec,
+ dimensionsSpec,
+ DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_LIST_DELIMITER),
+ DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
+ DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
+ DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)),
+ "UTF-8");
+ } else if ("delimited".equalsIgnoreCase(parseSpecFormat)) {
+ return new StringInputRowParser(new DelimitedParseSpec(timestampSpec,
+ dimensionsSpec,
+ DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_DELIMITER),
+ DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.DRUID_PARSE_SPEC_LIST_DELIMITER),
+ DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
+ DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
+ DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)),
+ "UTF-8");
+ } else if ("avro".equalsIgnoreCase(parseSpecFormat)) {
try {
String avroSchemaLiteral = DruidStorageHandlerUtils.getTableProperty(table, DruidConstants.AVRO_SCHEMA_LITERAL);
- Preconditions.checkNotNull(avroSchemaLiteral,
- "Please specify avro schema literal when using avro parser"
- );
- Map avroSchema = JSON_MAPPER
- .readValue(avroSchemaLiteral, new TypeReference