diff --git common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
index 409a13a..a4bac33 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
@@ -852,6 +852,11 @@ public void testToHiveDecimalString() {
     Decimal128 d12 = new Decimal128(27.000, (short)3);
     HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000"));
     assertEquals(hd7.toString(), d12.getHiveDecimalString());
-    assertEquals("27", hd7.toString());
+    assertEquals("27", d12.getHiveDecimalString());
+
+    Decimal128 d13 = new Decimal128(1234123000, (short)3);
+    HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
+    assertEquals(hd8.toString(), d13.getHiveDecimalString());
+    assertEquals("1234123000", d13.getHiveDecimalString());
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
index 32386fe..0a445f4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
@@ -53,6 +53,8 @@ public CastDecimalToTimestamp() {
   @Override
   protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) {
     tmp.update(inV.vector[i]);
+
+    // Reduce scale at most by 9, therefore multiplication will not require rounding.
     int newScale = inV.scale > 9 ? (inV.scale - 9) : 0;
     tmp.multiplyDestructive(tenE9, (short) newScale);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 901005e..d4c00ab 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -126,11 +126,6 @@ public int getOutputColumn() {
     return outputColumn;
   }
 
-  @Override
-  public String getOutputType() {
-    return getTypeString();
-  }
-
   public long getLongValue() {
     return longValue;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java
index 8418587..5127107 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java
@@ -22,6 +22,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
@@ -475,8 +476,7 @@ public Object evaluateOutput(
       else {
         assert(0 < myagg.count);
         resultCount.set (myagg.count);
-        int bufferIndex = myagg.sum.fastSerializeForHiveDecimal(scratch);
-        resultSum.set(scratch.getBytes(bufferIndex), (int) sumScale);
+        resultSum.set(HiveDecimal.create(myagg.sum.toBigDecimal()));
         return partialResult;
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index c26da37..567a93d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -421,7 +422,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx,
       VectorizationContext vContext = null;
 
       if (op instanceof TableScanOperator) {
-        vContext = getVectorizationContext(op, physicalContext);
+        vContext = getVectorizationContext((TableScanOperator) op, physicalContext);
         for (String onefile : mWork.getPathToAliases().keySet()) {
           List aliases = mWork.getPathToAliases().get(onefile);
           for (String alias : aliases) {
@@ -719,27 +720,20 @@ private boolean validateDataType(String type) {
     return supportedDataTypesPattern.matcher(type.toLowerCase()).matches();
   }
 
-  private VectorizationContext getVectorizationContext(Operator op,
+  private VectorizationContext getVectorizationContext(TableScanOperator op,
       PhysicalContext pctx) {
     RowSchema rs = op.getSchema();
 
     Map cmap = new HashMap();
     int columnCount = 0;
     for (ColumnInfo c : rs.getSignature()) {
-      if (!c.getIsVirtualCol()) {
+      if (!isVirtualColumn(c)) {
         cmap.put(c.getInternalName(), columnCount++);
       }
     }
-    PrunedPartitionList partList = pctx.getParseContext().getOpToPartList().get(op);
-    if (partList != null) {
-      Table tab = partList.getSourceTable();
-      if (tab.getPartitionKeys() != null) {
-        for (FieldSchema fs : tab.getPartitionKeys()) {
-          cmap.put(fs.getName(), columnCount++);
-        }
-      }
-    }
-    return new VectorizationContext(cmap, columnCount);
+
+    VectorizationContext vc = new VectorizationContext(cmap, columnCount);
+    return vc;
   }
 
   Operator vectorizeOperator(Operator op,
@@ -778,4 +772,16 @@ private VectorizationContext getVectorizationContext(Operator