From 7d69e2b68df56327e136cb57f2f0c39bf9b77bf2 Mon Sep 17 00:00:00 2001
From: Nishant
Date: Sat, 6 Oct 2018 02:01:00 +0530
Subject: [PATCH] [HIVE-20698] Add better message for NPE when inserting rows
 with null timestamp to druid

---
 .../hadoop/hive/druid/serde/DruidSerDe.java    |  8 +++--
 .../hive/druid/serde/TestDruidSerDe.java       | 34 +++++++++++++++++++
 2 files changed, 40 insertions(+), 2 deletions(-)

diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index 946a0753ee..cf37e37c15 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -357,9 +357,13 @@ protected SegmentAnalysis submitMetadataRequest(String address, SegmentMetadataQ
     assert values.size() > granularityFieldIndex;
     Preconditions.checkArgument(
         fields.get(granularityFieldIndex).getFieldName().equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME));
-    value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
+
+    Timestamp timestamp =
         ((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(granularityFieldIndex)).toEpochMilli());
+            .getPrimitiveJavaObject(values.get(granularityFieldIndex));
+    Preconditions.checkNotNull(timestamp, "Timestamp column cannot have null value");
+    value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, timestamp.toEpochMilli());
+
     if (values.size() == columns.length + 2) {
       // Then partition number if any.
       final int partitionNumPos = granularityFieldIndex + 1;
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
index 8b6c890e07..acde2394d7 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
@@ -74,6 +74,7 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
 
 import com.fasterxml.jackson.core.JsonParseException;
@@ -90,6 +91,7 @@
 import io.druid.query.select.SelectResultValue;
 import io.druid.query.timeseries.TimeseriesResultValue;
 import io.druid.query.topn.TopNResultValue;
+import org.junit.rules.ExpectedException;
 
 /**
  * Basic tests for Druid SerDe. The examples are taken from Druid 0.9.1.1
@@ -860,6 +862,38 @@ public void testDruidObjectSerializer()
     serializeObject(tbl, serDe, ROW_OBJECT, DRUID_WRITABLE);
   }
 
+  @Rule
+  public ExpectedException expectedEx = ExpectedException.none();
+
+  @Test
+  public void testDruidObjectSerializerwithNullTimestamp()
+      throws Exception {
+    // Create, initialize, and test the SerDe
+    DruidSerDe serDe = new DruidSerDe();
+    Configuration conf = new Configuration();
+    Properties tbl;
+    // Mixed source (all types)
+    tbl = createPropertiesSource(COLUMN_NAMES, COLUMN_TYPES);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
+    Object[] row = new Object[] {
+        null,
+        new Text("dim1_val"),
+        new HiveCharWritable(new HiveChar("dim2_v", 6)),
+        new HiveVarcharWritable(new HiveVarchar("dim3_val", 8)),
+        new DoubleWritable(10669.3D),
+        new FloatWritable(10669.45F),
+        new LongWritable(1113939),
+        new IntWritable(1112123),
+        new ShortWritable((short) 12),
+        new ByteWritable((byte) 0),
+        null // granularity
+    };
+    expectedEx.expect(NullPointerException.class);
+    expectedEx.expectMessage("Timestamp column cannot have null value");
+    // should fail as timestamp is null
+    serializeObject(tbl, serDe, row, DRUID_WRITABLE);
+  }
+
   private static Properties createPropertiesSource(String columnNames, String columnTypes) {
     Properties tbl = new Properties();
-- 
2.17.1 (Apple Git-112)