diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 88a7cfc70e..4fac35b8ef 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2682,9 +2682,6 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal
         "Wait time in ms default to 30 seconds."
     ),
     HIVE_DRUID_BITMAP_FACTORY_TYPE("hive.druid.bitmap.type", "roaring", new PatternSet("roaring", "concise"), "Coding algorithm use to encode the bitmaps"),
-    HIVE_DRUID_APPROX_RESULT("hive.druid.approx.result", false,
-        "Whether to allow approximate results from druid. \n" +
-        "When set to true decimals will be stored as double and druid is allowed to return approximate results for decimal columns."),
     // For HBase storage handler
     HIVE_HBASE_WAL_ENABLED("hive.hbase.wal.enabled", true,
         "Whether writes to HBase should be forced to the write-ahead log. \n" +
diff --git druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
index 93d3e5ce3b..076f00af37 100644
--- druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
+++ druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
@@ -817,8 +817,6 @@ public static IndexSpec getIndexSpec(Configuration jc) {
     // Default, all columns that are not metrics or timestamp, are treated as dimensions
     final List<String> dimensions = new ArrayList<>();
     ImmutableList.Builder<AggregatorFactory> aggregatorFactoryBuilder = ImmutableList.builder();
-    final boolean approximationAllowed = HiveConf
-        .getBoolVar(jc, HiveConf.ConfVars.HIVE_DRUID_APPROX_RESULT);
     for (int i = 0; i < columnTypes.size(); i++) {
       final PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) columnTypes
           .get(i)).getPrimitiveCategory();
@@ -835,15 +833,10 @@ public static IndexSpec getIndexSpec(Configuration jc) {
           af = new DoubleSumAggregatorFactory(columnNames.get(i), columnNames.get(i));
           break;
         case DECIMAL:
-          if (approximationAllowed) {
-            af = new DoubleSumAggregatorFactory(columnNames.get(i), columnNames.get(i));
-          } else {
-            throw new UnsupportedOperationException(
-                String.format("Druid does not support decimal column type." +
-                    "Either cast column [%s] to double or Enable Approximate Result for Druid by setting property [%s] to true",
-                    columnNames.get(i), HiveConf.ConfVars.HIVE_DRUID_APPROX_RESULT.varname));
-          }
-          break;
+          throw new UnsupportedOperationException(
+              String.format("Druid does not support decimal column type. Cast column " +
+                  "[%s] to double", columnNames.get(i)));
+
         case TIMESTAMP:
           // Granularity column
           String tColumnName = columnNames.get(i);
diff --git druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index d991adb088..5f7657975a 100644
--- druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -17,26 +17,17 @@
  */
 package org.apache.hadoop.hive.druid.serde;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.sql.Timestamp;
-import java.time.Instant;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.stream.Collectors;
-
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import io.druid.query.Druids;
+import io.druid.query.Druids.SegmentMetadataQueryBuilder;
+import io.druid.query.metadata.metadata.ColumnAnalysis;
+import io.druid.query.metadata.metadata.SegmentAnalysis;
+import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.Constants;
@@ -53,7 +44,6 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
@@ -67,7 +57,6 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -92,15 +81,21 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.base.Function;
-import com.google.common.collect.Lists;
-
-import io.druid.query.Druids;
-import io.druid.query.Druids.SegmentMetadataQueryBuilder;
-import io.druid.query.metadata.metadata.ColumnAnalysis;
-import io.druid.query.metadata.metadata.SegmentAnalysis;
-import io.druid.query.metadata.metadata.SegmentMetadataQuery;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.stream.Collectors;
 
 /**
  * DruidSerDe that is used to deserialize objects from a Druid data source.
@@ -350,10 +345,6 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD
           res = ((DoubleObjectInspector) fields.get(i).getFieldObjectInspector())
                   .get(values.get(i));
           break;
-        case DECIMAL:
-          res = ((HiveDecimalObjectInspector) fields.get(i).getFieldObjectInspector())
-                  .getPrimitiveJavaObject(values.get(i)).doubleValue();
-          break;
         case CHAR:
           res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector())
                   .getPrimitiveJavaObject(values.get(i)).getValue();
@@ -371,7 +362,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD
                   .get(values.get(i));
           break;
         default:
-          throw new SerDeException("Unknown type: " + types[i].getPrimitiveCategory());
+          throw new SerDeException("Unsupported type: " + types[i].getPrimitiveCategory());
       }
       value.put(columns[i], res);
     }
@@ -452,9 +443,6 @@ public Object deserialize(Writable writable) throws SerDeException {
       case DOUBLE:
         output.add(new DoubleWritable(((Number) value).doubleValue()));
         break;
-      case DECIMAL:
-        output.add(new HiveDecimalWritable(HiveDecimal.create(((Number) value).doubleValue())));
-        break;
      case CHAR:
        output.add(
            new HiveCharWritable(
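
Note on the net effect of the patch: with HIVE_DRUID_APPROX_RESULT gone, a DECIMAL metric column can no longer be indexed into Druid at all. DruidStorageHandlerUtils now fails fast instead of silently widening decimals to double, and DruidSerDe no longer serializes or deserializes decimal values, so the user is expected to cast the column to DOUBLE explicitly. The sketch below condenses the post-patch aggregator selection into a standalone helper for illustration; the class and method names (DruidAggregatorSketch, selectMetricAggregator) and the free-standing shape are assumptions of this sketch, not Hive's actual API, while the Druid aggregator factories and Hive's PrimitiveCategory enum are real.

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

public final class DruidAggregatorSketch {

  // Illustrative helper mirroring the post-patch behaviour: integral Hive columns map to
  // long-sum aggregators, floating-point columns to double-sum aggregators, and DECIMAL
  // is rejected outright instead of being approximated as double.
  static AggregatorFactory selectMetricAggregator(String columnName, PrimitiveCategory category) {
    switch (category) {
      case BYTE:
      case SHORT:
      case INT:
      case LONG:
        return new LongSumAggregatorFactory(columnName, columnName);
      case FLOAT:
      case DOUBLE:
        return new DoubleSumAggregatorFactory(columnName, columnName);
      case DECIMAL:
        throw new UnsupportedOperationException(String.format(
            "Druid does not support decimal column type. Cast column [%s] to double", columnName));
      default:
        throw new UnsupportedOperationException("Unsupported metric type: " + category);
    }
  }

  public static void main(String[] args) {
    // A double column gets a sum aggregator as before...
    System.out.println(selectMetricAggregator("price", PrimitiveCategory.DOUBLE));
    // ...while a decimal column now fails fast with the new error message.
    try {
      selectMetricAggregator("discount", PrimitiveCategory.DECIMAL);
    } catch (UnsupportedOperationException e) {
      System.out.println(e.getMessage());
    }
  }
}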