diff --git common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
index 409a13a..a4bac33 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
@@ -852,6 +852,11 @@ public void testToHiveDecimalString() {
     Decimal128 d12 = new Decimal128(27.000, (short)3);
     HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000"));
     assertEquals(hd7.toString(), d12.getHiveDecimalString());
-    assertEquals("27", hd7.toString());
+    assertEquals("27", d12.getHiveDecimalString());
+
+    Decimal128 d13 = new Decimal128(1234123000, (short)3);
+    HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
+    assertEquals(hd8.toString(), d13.getHiveDecimalString());
+    assertEquals("1234123000", d13.getHiveDecimalString());
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
index 32386fe..0a445f4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
@@ -53,6 +53,8 @@ public CastDecimalToTimestamp() {
   @Override
   protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) {
     tmp.update(inV.vector[i]);
+
+    // Reduce scale at most by 9, therefore multiplication will not require rounding.
     int newScale = inV.scale > 9 ? (inV.scale - 9) : 0;
     tmp.multiplyDestructive(tenE9, (short) newScale);
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index c26da37..66c89f0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -421,7 +421,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx,
 
       VectorizationContext vContext = null;
       if (op instanceof TableScanOperator) {
-        vContext = getVectorizationContext(op, physicalContext);
+        vContext = getVectorizationContext((TableScanOperator) op, physicalContext);
        for (String onefile : mWork.getPathToAliases().keySet()) {
           List aliases = mWork.getPathToAliases().get(onefile);
           for (String alias : aliases) {
@@ -719,27 +719,18 @@ private boolean validateDataType(String type) {
     return supportedDataTypesPattern.matcher(type.toLowerCase()).matches();
   }
 
-  private VectorizationContext getVectorizationContext(Operator op,
+  private VectorizationContext getVectorizationContext(TableScanOperator op,
       PhysicalContext pctx) {
     RowSchema rs = op.getSchema();
 
     Map cmap = new HashMap();
     int columnCount = 0;
     for (ColumnInfo c : rs.getSignature()) {
-      if (!c.getIsVirtualCol()) {
-        cmap.put(c.getInternalName(), columnCount++);
-      }
+      cmap.put(c.getInternalName(), columnCount++);
     }
-    PrunedPartitionList partList = pctx.getParseContext().getOpToPartList().get(op);
-    if (partList != null) {
-      Table tab = partList.getSourceTable();
-      if (tab.getPartitionKeys() != null) {
-        for (FieldSchema fs : tab.getPartitionKeys()) {
-          cmap.put(fs.getName(), columnCount++);
-        }
-      }
-    }
-    return new VectorizationContext(cmap, columnCount);
+
+    VectorizationContext vc = new VectorizationContext(cmap, columnCount);
+    return vc;
   }
 
   Operator vectorizeOperator(Operator op,
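
Note on the comment added in CastDecimalToTimestamp.func: the cast turns a decimal number of seconds into a long count of nanoseconds by multiplying by 10^9. Because the multiplier has exactly nine trailing zeros, the scale can be reduced by up to 9 digits during that multiplication without any rounding; only inputs whose scale exceeds 9 can ever lose precision. Below is a minimal BigDecimal sketch of the same arithmetic, not the Decimal128 fast path used by the patch; the class name, method name, and HALF_UP rounding mode are illustrative assumptions, not taken from the Hive code.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class DecimalToNanosSketch {
  // Convert decimal seconds to long nanoseconds. Shifting the decimal point
  // right by 9 absorbs up to 9 digits of scale exactly; rounding can only
  // matter when the input scale exceeds 9.
  static long toNanos(BigDecimal seconds) {
    BigDecimal nanos = seconds.movePointRight(9);
    return nanos.setScale(0, RoundingMode.HALF_UP).longValueExact();
  }

  public static void main(String[] args) {
    System.out.println(toNanos(new BigDecimal("27.000")));        // 27000000000
    System.out.println(toNanos(new BigDecimal("1.5")));           // 1500000000
    System.out.println(toNanos(new BigDecimal("0.1234567891")));  // scale 10 > 9, rounds to 123456789
  }
}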