diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 9d27b8d5c0..abf76a95e7 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -877,6 +877,7 @@ minillaplocal.query.files=\
   vector_decimal_udf.q,\
   vector_decimal64_case_when_nvl.q,\
   vector_decimal64_case_when_nvl_cbo.q,\
+  vector_decimal64_multi_vertex.q,\
   vector_full_outer_join.q,\
   vector_fullouter_mapjoin_1_fast.q,\
   vector_fullouter_mapjoin_1_optimized.q,\
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
index 25573ce026..3b95835a36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
@@ -23,6 +23,7 @@ import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
@@ -191,12 +192,15 @@ void init(JobConf jconf, Operator<?> reducer, boolean vectorized, TableDesc keyT
       keyBinarySortableDeserializeToRow =
           new VectorDeserializeRow<BinarySortableDeserializeRead>(
               new BinarySortableDeserializeRead(
-                  VectorizedBatchUtil.typeInfosFromStructObjectInspector(
-                      keyStructInspector),
-                  /* useExternalBuffer */ true,
-                  binarySortableSerDe.getSortOrders(),
-                  binarySortableSerDe.getNullMarkers(),
-                  binarySortableSerDe.getNotNullMarkers()));
+                  VectorizedBatchUtil.typeInfosFromStructObjectInspector(
+                      keyStructInspector),
+                  (batchContext.getRowdataTypePhysicalVariations().length > firstValueColumnOffset)
+                      ? Arrays.copyOfRange(batchContext.getRowdataTypePhysicalVariations(), 0,
+                          firstValueColumnOffset) : null,
+                  /* useExternalBuffer */ true,
+                  binarySortableSerDe.getSortOrders(),
+                  binarySortableSerDe.getNullMarkers(),
+                  binarySortableSerDe.getNotNullMarkers()));
       keyBinarySortableDeserializeToRow.init(0);
 
       final int valuesSize = valueStructInspectors.getAllStructFieldRefs().size();
@@ -205,7 +209,11 @@ void init(JobConf jconf, Operator<?> reducer, boolean vectorized, TableDesc keyT
             new VectorDeserializeRow<LazyBinaryDeserializeRead>(
                 new LazyBinaryDeserializeRead(
                     VectorizedBatchUtil.typeInfosFromStructObjectInspector(
-                        valueStructInspectors),
+                        valueStructInspectors),
+                    (batchContext.getRowdataTypePhysicalVariations().length >= totalColumns)
+                        ? Arrays.copyOfRange(batchContext.getRowdataTypePhysicalVariations(),
+                            firstValueColumnOffset, totalColumns)
+                        : null,
                     /* useExternalBuffer */ true));
         valueLazyBinaryDeserializeToRow.init(firstValueColumnOffset);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java
index 68cb92ea51..edd073181c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java
@@ -20,6 +20,7 @@
 
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
@@ -33,6 +34,7 @@
   protected int[] sourceColumns;
   protected int[] outputColumns;
   protected TypeInfo[] typeInfos;
+  protected DataTypePhysicalVariation[] dataTypePhysicalVariations;
 
   protected VectorColumnOrderedMap vectorColumnMapping;
 
@@ -40,7 +42,8 @@ public VectorColumnMapping(String name) {
     this.vectorColumnMapping = new VectorColumnOrderedMap(name);
   }
 
-  public abstract void add(int sourceColumn, int outputColumn, TypeInfo typeInfo);
+  public abstract void add(int sourceColumn, int outputColumn, TypeInfo typeInfo,
+      DataTypePhysicalVariation dataTypePhysicalVariation);
 
   public abstract void finalize();
 
@@ -60,6 +63,10 @@ public int getCount() {
     return typeInfos;
   }
 
+  public DataTypePhysicalVariation[] getDataTypePhysicalVariations() {
+    return dataTypePhysicalVariations;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java
index 0511b99d13..9542ebe91b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java
@@ -24,6 +24,7 @@
 
 import org.apache.commons.lang3.ArrayUtils;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -47,9 +48,12 @@
     TypeInfo typeInfo;
 
-    Value(int valueColumn, TypeInfo typeInfo) {
+    DataTypePhysicalVariation dataTypePhysicalVariation;
+
+    Value(int valueColumn, TypeInfo typeInfo, DataTypePhysicalVariation dataTypePhysicalVariation) {
       this.valueColumn = valueColumn;
       this.typeInfo = typeInfo;
+      this.dataTypePhysicalVariation = dataTypePhysicalVariation;
     }
 
     public String toString() {
@@ -65,11 +69,14 @@ public String toString() {
     private final int[] orderedColumns;
     private final int[] valueColumns;
     private final TypeInfo[] typeInfos;
+    private final DataTypePhysicalVariation[] dataTypePhysicalVariations;
 
-    Mapping(int[] orderedColumns, int[] valueColumns, TypeInfo[] typeInfos) {
+    Mapping(int[] orderedColumns, int[] valueColumns, TypeInfo[] typeInfos,
+        DataTypePhysicalVariation[] dataTypePhysicalVariations) {
       this.orderedColumns = orderedColumns;
       this.valueColumns = valueColumns;
       this.typeInfos = typeInfos;
+      this.dataTypePhysicalVariations = dataTypePhysicalVariations;
     }
 
     public int getCount() {
@@ -87,6 +94,10 @@ public int getCount() {
     public TypeInfo[] getTypeInfos() {
       return typeInfos;
     }
+
+    public DataTypePhysicalVariation[] getDataTypePhysicalVariations() {
+      return dataTypePhysicalVariations;
+    }
   }
 
   public VectorColumnOrderedMap(String name) {
@@ -94,14 +105,15 @@ public VectorColumnOrderedMap(String name) {
     orderedTreeMap = new TreeMap<Integer, Value>();
   }
 
-  public void add(int orderedColumn, int valueColumn, TypeInfo typeInfo) {
+  public void add(int orderedColumn, int valueColumn, TypeInfo typeInfo,
+      DataTypePhysicalVariation dataTypePhysicalVariation) {
     if (orderedTreeMap.containsKey(orderedColumn)) {
       throw new RuntimeException(
           name + " duplicate column " + orderedColumn +
           " in ordered column map " + orderedTreeMap.toString() +
           " when adding value column " + valueColumn + ", type into " + typeInfo.toString());
     }
-    orderedTreeMap.put(orderedColumn, new Value(valueColumn, typeInfo));
+    orderedTreeMap.put(orderedColumn, new Value(valueColumn, typeInfo, dataTypePhysicalVariation));
   }
 
   public boolean orderedColumnsContain(int orderedColumn) {
@@ -112,15 +124,18 @@ public Mapping getMapping() {
     ArrayList<Integer> orderedColumns = new ArrayList<Integer>();
     ArrayList<Integer> valueColumns = new ArrayList<Integer>();
     ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
+    ArrayList<DataTypePhysicalVariation> dataTypePhysicalVariations = new ArrayList<DataTypePhysicalVariation>();
     for (Map.Entry<Integer, Value> entry : orderedTreeMap.entrySet()) {
       orderedColumns.add(entry.getKey());
       Value value = entry.getValue();
       valueColumns.add(value.valueColumn);
       typeInfos.add(value.typeInfo);
+      dataTypePhysicalVariations.add(value.dataTypePhysicalVariation);
     }
     return new Mapping(
         ArrayUtils.toPrimitive(orderedColumns.toArray(new Integer[0])),
         ArrayUtils.toPrimitive(valueColumns.toArray(new Integer[0])),
-        typeInfos.toArray(new TypeInfo[0]));
+        typeInfos.toArray(new TypeInfo[0]),
+        dataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0]));
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java
index 0a2bb25bfd..af8de9e020 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOrderedMap.Mapping;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
@@ -36,9 +37,10 @@ public VectorColumnOutputMapping(String name) {
   }
 
   @Override
-  public void add(int sourceColumn, int outputColumn, TypeInfo typeInfo) {
+  public void add(int sourceColumn, int outputColumn, TypeInfo typeInfo,
+      DataTypePhysicalVariation dataTypePhysicalVariation) {
     // Order on outputColumn.
-    vectorColumnMapping.add(outputColumn, sourceColumn, typeInfo);
+    vectorColumnMapping.add(outputColumn, sourceColumn, typeInfo, dataTypePhysicalVariation);
   }
 
   public boolean containsOutputColumn(int outputColumn) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java
index 60acb3bcab..425630cdc2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOrderedMap.Mapping;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
@@ -36,9 +37,9 @@ public VectorColumnSourceMapping(String name) {
   }
 
   @Override
-  public void add(int sourceColumn, int outputColumn, TypeInfo typeInfo) {
+  public void add(int sourceColumn, int outputColumn, TypeInfo typeInfo, DataTypePhysicalVariation dataTypePhysicalVariation) {
     // Order on sourceColumn.
-    vectorColumnMapping.add(sourceColumn, outputColumn, typeInfo);
+    vectorColumnMapping.add(sourceColumn, outputColumn, typeInfo, dataTypePhysicalVariation);
   }
 
   @Override
@@ -49,6 +50,7 @@ public void finalize() {
     sourceColumns = mapping.getOrderedColumns();
     outputColumns = mapping.getValueColumns();
     typeInfos = mapping.getTypeInfos();
+    dataTypePhysicalVariations = mapping.getDataTypePhysicalVariations();
 
     // Not needed anymore.
     vectorColumnMapping = null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index dc12d61589..eab68475cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -497,6 +497,10 @@ public void setInitialDataTypePhysicalVariations(
     return initialTypeInfos.toArray(new TypeInfo[0]);
   }
 
+  public List<DataTypePhysicalVariation> getInitialDataTypePhysicalVariations() {
+    return initialDataTypePhysicalVariations;
+  }
+
   public TypeInfo getTypeInfo(int columnNum) throws HiveException {
     if (initialTypeInfos == null) {
       throw new HiveException("initialTypeInfos array is null in contextName " + contextName);
@@ -528,6 +532,9 @@ public DataTypePhysicalVariation getDataTypePhysicalVariation(int columnNum) thr
     if (columnNum < initialDataTypePhysicalVariations.size()) {
       return initialDataTypePhysicalVariations.get(columnNum);
     }
+    if (columnNum < initialColumnNames.size()) {
+      return null;
+    }
     return ocm.getDataTypePhysicalVariation(columnNum);
   }
 
@@ -834,6 +841,11 @@ public int allocateScratchColumn(TypeInfo typeInfo) throws HiveException {
     return ocm.allocateOutputColumn(typeInfo);
   }
 
+  public int allocateScratchColumn(TypeInfo typeInfo, DataTypePhysicalVariation dataTypePhysicalVariation)
+      throws HiveException {
+    return ocm.allocateOutputColumn(typeInfo, dataTypePhysicalVariation);
+  }
+
   public int[] currentScratchColumns() {
     return ocm.currentScratchColumns();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index ebffbc1a06..71eb2b7550 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -311,7 +311,7 @@ public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx, PartitionDes
   private ColumnVector createColumnVectorFromRowColumnTypeInfos(int columnNum) {
     TypeInfo typeInfo = rowColumnTypeInfos[columnNum];
     final DataTypePhysicalVariation dataTypePhysicalVariation;
-    if (rowDataTypePhysicalVariations != null) {
+    if (rowDataTypePhysicalVariations != null && columnNum < rowDataTypePhysicalVariations.length) {
       dataTypePhysicalVariation = rowDataTypePhysicalVariations[columnNum];
     } else {
       dataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java
index 75e9f52eae..3b5f8eeedc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java
@@ -577,6 +577,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
           new VectorDeserializeRow<LazyBinaryDeserializeRead>(
               new LazyBinaryDeserializeRead(
                   smallTableValueMapping.getTypeInfos(),
+                  smallTableValueMapping.getDataTypePhysicalVariations(),
                   /* useExternalBuffer */ true));
       smallTableValueVectorDeserializeRow.init(smallTableValueMapping.getOutputColumns());
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 988291664e..58f9076687 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -354,6 +354,7 @@
   private BaseWork currentBaseWork;
   private Operator<? extends OperatorDesc> currentOperator;
+  private DataTypePhysicalVariation[] rsDataTypePhysicalVariations;
   private Collection<Class<?>> vectorizedInputFormatExcludes;
   private Map<Operator<?>, Set<ImmutablePair<Operator<?>, Operator<?>>>> delayedFixups =
       new IdentityHashMap<Operator<?>, Set<ImmutablePair<Operator<?>, Operator<?>>>>();
@@ -2377,8 +2378,12 @@ private boolean getOnlyStructObjectInspectors(ReduceWork reduceWork,
       throw new SemanticException(e);
     }
 
+    vectorTaskColumnInfo.setAllColumnNames(reduceColumnNames);
     vectorTaskColumnInfo.setAllTypeInfos(reduceTypeInfos);
+    if (rsDataTypePhysicalVariations != null) {
+      vectorTaskColumnInfo.setAlldataTypePhysicalVariations(Arrays.asList(rsDataTypePhysicalVariations));
+    }
     vectorTaskColumnInfo.setReduceColumnSortOrder(columnSortOrder);
     vectorTaskColumnInfo.setReduceColumnNullOrder(columnNullOrder);
 
@@ -3730,13 +3735,13 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
         TypeInfo typeInfo = bigTableValueTypeInfos[i];
 
         // With this map we project the big table batch to make it look like an output batch.
-        projectionMapping.add(nextOutputColumn, batchColumnIndex, typeInfo);
+        projectionMapping.add(nextOutputColumn, batchColumnIndex, typeInfo, DataTypePhysicalVariation.NONE);
 
         // Collect columns we copy from the big table batch to the overflow batch.
         if (!bigTableRetainMapping.containsOutputColumn(batchColumnIndex)) {
           // Tolerate repeated use of a big table column.
-          bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo);
+          bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo, DataTypePhysicalVariation.NONE);
         }
 
         nextOutputColumn++;
@@ -3771,7 +3776,8 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
           // Optimize inner join keys of small table results.
          // Project the big table key into the small table result "area".
-          projectionMapping.add(nextOutputColumn, bigTableKeyColumn, typeInfo);
+          projectionMapping.add(nextOutputColumn, bigTableKeyColumn, typeInfo,
+              DataTypePhysicalVariation.NONE);
 
           if (!bigTableRetainMapping.containsOutputColumn(bigTableKeyColumn)) {
@@ -3779,7 +3785,8 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
             // Big Table key into the overflow batch so the projection of it (Big Table key) to
             // the Small Table key will work properly...
             //
-            nonOuterSmallTableKeyMapping.add(bigTableKeyColumn, bigTableKeyColumn, typeInfo);
+            nonOuterSmallTableKeyMapping.add(bigTableKeyColumn, bigTableKeyColumn, typeInfo,
+                DataTypePhysicalVariation.NONE);
           }
         } else {
@@ -3788,13 +3795,16 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
           // projection optimization used by non-[FULL} OUTER joins above.
 
           int scratchColumn = vContext.allocateScratchColumn(typeInfo);
 
-          projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
+          projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo,
+              DataTypePhysicalVariation.NONE);
 
-          outerSmallTableKeyMapping.add(bigTableKeyColumn, scratchColumn, typeInfo);
+          outerSmallTableKeyMapping.add(bigTableKeyColumn, scratchColumn, typeInfo,
+              DataTypePhysicalVariation.NONE);
 
           // For FULL OUTER MapJoin, we need to be able to deserialize a Small Table key
           // into the output result.
-          fullOuterSmallTableKeyMapping.add(keyIndex, scratchColumn, typeInfo);
+          fullOuterSmallTableKeyMapping.add(keyIndex, scratchColumn, typeInfo,
+              DataTypePhysicalVariation.NONE);
         }
       } else {
@@ -3812,9 +3822,9 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
 
         // Make a new big table scratch column for the small table value.
         int scratchColumn = vContext.allocateScratchColumn(typeInfo);
 
-        projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
+        projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo, DataTypePhysicalVariation.NONE);
 
-        smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo);
+        smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo, DataTypePhysicalVariation.NONE);
       }
       nextOutputColumn++;
     }
@@ -3832,11 +3842,18 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
 
       // Make a new big table scratch column for the small table value.
      TypeInfo typeInfo = smallTableExprNode.getTypeInfo();
 
-      int scratchColumn = vContext.allocateScratchColumn(typeInfo);
+      DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
+      if (typeInfo instanceof DecimalTypeInfo) {
+        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+        if (decimalTypeInfo.getPrecision() <= 18) {
+          dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
+        }
+      }
+      int scratchColumn = vContext.allocateScratchColumn(typeInfo, dataTypePhysicalVariation);
 
-      projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
+      projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo, dataTypePhysicalVariation);
 
-      smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo);
+      smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo, dataTypePhysicalVariation);
       nextOutputColumn++;
     }
   }
@@ -4117,9 +4134,10 @@ private boolean canSpecializeReduceSink(ReduceSinkDesc desc,
       VectorExpression ve = allValueExpressions[i];
       reduceSinkValueColumnMap[i] = ve.getOutputColumnNum();
       reduceSinkValueTypeInfos[i] = valueDescs.get(i).getTypeInfo();
-      reduceSinkValueColumnVectorTypes[i] =
-          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkValueTypeInfos[i]);
-      if (!IdentityExpression.isColumnOnly(ve)) {
+      reduceSinkValueColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(
+          reduceSinkValueTypeInfos[i], ve.getOutputDataTypePhysicalVariation());
+      if (!IdentityExpression.isColumnOnly(ve) ||
+          ve.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
         reduceSinkValueExpressionsList.add(ve);
       }
     }
@@ -4676,7 +4694,8 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) {
       ExprNodeDesc expr = colList.get(i);
       VectorExpression ve = vContext.getVectorExpression(expr);
      projectedOutputColumns[i] = ve.getOutputColumnNum();
-      if (ve instanceof IdentityExpression) {
+      if (ve instanceof IdentityExpression
+          && ve.getOutputDataTypePhysicalVariation() != DataTypePhysicalVariation.DECIMAL_64) {
         // Suppress useless evaluation.
continue; } @@ -5070,6 +5089,9 @@ private static VectorPTFInfo createVectorPTFInfo(Operator 16:int + selectExpressions: col 7:decimal(10,0)/DECIMAL_64, ConstantVectorExpression(val 11) -> 16:int Statistics: Num rows: 2 Data size: 3364 Basic stats: COMPLETE Column stats: PARTIAL File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index ae9922e23e..d796c5e7da 100644 --- a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -148,6 +148,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 100 Data size: 34084 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: @@ -228,6 +229,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 100 Data size: 34084 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col10 (type: binary) @@ -238,6 +240,7 @@ STAGE PLANS: className: VectorReduceSinkStringOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 100 Data size: 34084 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)) Execution mode: vectorized, llap diff --git a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out index 718daea797..6e710429ed 100644 --- a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -269,6 +269,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1050 Data size: 357661 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int) @@ -278,6 +279,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1050 Data size: 357661 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary) @@ -308,6 +310,7 @@ STAGE PLANS: className: 
VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: col 3:bigint/DECIMAL_64 Statistics: Num rows: 1050 Data size: 357661 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 20 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl.q.out index 2b480586f5..e6537980ff 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl.q.out @@ -73,6 +73,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 2] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(CASE WHEN ((ss_ext_discount_amt = 101)) THEN (ss_ext_list_price) ELSE (null) END) @@ -95,6 +96,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -126,14 +128,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false @@ -207,6 +209,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(COALESCE(ss_ext_list_price,1.1)) @@ -229,6 +232,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -260,14 +264,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false @@ -341,6 +345,7 @@ 
STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(COALESCE(ss_ext_list_price,1.1)) @@ -363,6 +368,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -394,14 +400,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out index d3e6eec3fa..0c639795a9 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out @@ -231,6 +231,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -262,14 +263,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false @@ -366,6 +367,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -397,14 +399,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - 
dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out new file mode 100644 index 0000000000..b4152aeb11 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out @@ -0,0 +1,330 @@ +PREHOOK: query: create table store +( + s_store_sk int, + s_store_id string, + s_rec_start_date string, + s_rec_end_date string, + s_closed_date_sk int +) +row format delimited fields terminated by '\t' +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@store +POSTHOOK: query: create table store +( + s_store_sk int, + s_store_id string, + s_rec_start_date string, + s_rec_end_date string, + s_closed_date_sk int +) +row format delimited fields terminated by '\t' +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@store +PREHOOK: query: create table store_sales +( + ss_item_sk int, + ss_ext_sales_price decimal(7,2) +) +row format delimited fields terminated by '\t' +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@store_sales +POSTHOOK: query: create table store_sales +( + ss_item_sk int, + ss_ext_sales_price decimal(7,2) +) +row format delimited fields terminated by '\t' +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@store_sales +PREHOOK: query: insert into store values(1,'ramesh','ramesh','ramesh',1) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@store +POSTHOOK: query: insert into store values(1,'ramesh','ramesh','ramesh',1) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@store +POSTHOOK: Lineage: store.s_closed_date_sk SCRIPT [] +POSTHOOK: Lineage: store.s_rec_end_date SCRIPT [] +POSTHOOK: Lineage: store.s_rec_start_date SCRIPT [] +POSTHOOK: Lineage: store.s_store_id SCRIPT [] +POSTHOOK: Lineage: store.s_store_sk SCRIPT [] +PREHOOK: query: insert into store_sales values(1,1.1) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@store_sales +POSTHOOK: query: insert into store_sales values(1,1.1) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@store_sales +POSTHOOK: Lineage: store_sales.ss_ext_sales_price SCRIPT [] +POSTHOOK: Lineage: store_sales.ss_item_sk SCRIPT [] +PREHOOK: query: explain vectorization detail +select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk, + sum(ss_ext_sales_price) ext_price + from store_sales, store + where ss_item_sk = s_store_sk + group by s_store_id, + s_rec_start_date, + s_rec_end_date, + s_closed_date_sk +PREHOOK: type: QUERY +PREHOOK: Input: default@store +PREHOOK: Input: default@store_sales +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select s_store_id brand_id, s_rec_start_date brand, 
s_rec_end_date, s_closed_date_sk, + sum(ss_ext_sales_price) ext_price + from store_sales, store + where ss_item_sk = s_store_sk + group by s_store_id, + s_rec_start_date, + s_rec_end_date, + s_closed_date_sk +POSTHOOK: type: QUERY +POSTHOOK: Input: default@store +POSTHOOK: Input: default@store_sales +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: store_sales + filterExpr: ss_item_sk is not null (type: boolean) + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_item_sk:int, 1:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 0:int) + predicate: ss_item_sk is not null (type: boolean) + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: ss_item_sk (type: int), ss_ext_sales_price (type: decimal(7,2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + selectExpressions: col 1:decimal(7,2)/DECIMAL_64 + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [1] + bigTableValueColumns: 1:decimal(7,2) + className: VectorMapJoinInnerLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + projectedOutput: 1:decimal(7,2), 3:string, 4:string, 5:string, 6:int + smallTableValueMapping: 3:string, 4:string, 5:string, 6:int + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col1, _col3, _col4, _col5, _col6 + input vertices: + 1 Map 3 + Statistics: Num rows: 1 Data size: 386 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col1) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal64(col 1:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 + className: VectorGroupByOperator + groupByMode: HASH + keyExpressions: col 3:string, col 4:string, col 5:string, col 6:int + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + keys: _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: int) + minReductionHashAggr: 0.0 + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 386 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: int) + null sort order: zzzz + sort order: ++++ + 
Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:string, 1:string, 2:string, 3:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 4:decimal(17,2) + valueExpressions: col 4:decimal(17,2)/DECIMAL_64 + Statistics: Num rows: 1 Data size: 386 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col4 (type: decimal(17,2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: ss_item_sk:int, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [string, string, string, bigint] + Map 3 + Map Operator Tree: + TableScan + alias: store + filterExpr: s_store_sk is not null (type: boolean) + Statistics: Num rows: 1 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:s_store_sk:int, 1:s_store_id:string, 2:s_rec_start_date:string, 3:s_rec_end_date:string, 4:s_closed_date_sk:int, 5:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: SelectColumnIsNotNull(col 0:int) + predicate: s_store_sk is not null (type: boolean) + Statistics: Num rows: 1 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: s_store_sk (type: int), s_store_id (type: string), s_rec_start_date (type: string), s_rec_end_date (type: string), s_closed_date_sk (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4] + Statistics: Num rows: 1 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string, 2:string, 3:string, 4:int + Statistics: Num rows: 1 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + 
usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 5 + includeColumns: [0, 1, 2, 3, 4] + dataColumns: s_store_sk:int, s_store_id:string, s_rec_start_date:string, s_rec_end_date:string, s_closed_date_sk:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: zzzz + reduceColumnSortOrder: ++++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 5 + dataColumns: KEY._col0:string, KEY._col1:string, KEY._col2:string, KEY._col3:int, VALUE._col0:decimal(17,2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 4:decimal(17,2)) -> decimal(17,2) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + keyExpressions: col 0:string, col 1:string, col 2:string, col 3:int + native: false + vectorProcessingMode: MERGE_PARTIAL + projectedOutputColumnNums: [0] + keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 386 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 386 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk, + sum(ss_ext_sales_price) ext_price + from store_sales, store + where ss_item_sk = s_store_sk + group by s_store_id, + s_rec_start_date, + s_rec_end_date, + s_closed_date_sk +PREHOOK: type: QUERY +PREHOOK: Input: default@store +PREHOOK: Input: default@store_sales +#### A masked pattern was here #### +POSTHOOK: query: select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk, + sum(ss_ext_sales_price) ext_price + from store_sales, store + where ss_item_sk = s_store_sk + group by s_store_id, + s_rec_start_date, + s_rec_end_date, + s_closed_date_sk +POSTHOOK: type: QUERY +POSTHOOK: Input: default@store +POSTHOOK: Input: default@store_sales +#### A masked pattern was here #### +ramesh ramesh ramesh 1 1.10 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 2f3ffd71e5..e883515e12 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -74,6 +74,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -194,6 +195,7 @@ STAGE PLANS: className: VectorSelectOperator native: 
true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out index e899da5c1f..ee619ea09b 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out @@ -147,6 +147,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(10,5)/DECIMAL_64 Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,5)), _col1 (type: int) @@ -292,6 +293,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(17,4)/DECIMAL_64 Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(17,4)), _col1 (type: int) @@ -695,6 +697,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 3:decimal(11,5) + valueExpressions: col 3:decimal(11,5)/DECIMAL_64 Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(11,5)) Execution mode: vectorized, llap diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index c55f540266..76e014bbe7 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -489,6 +489,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1, 2, 3] + selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64 Statistics: Num rows: 12289 Data size: 2091336 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count() @@ -516,6 +517,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:bigint, 6:decimal(16,0), 7:decimal(16,0), 8:decimal(26,0), 9:bigint + valueExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64, col 6:decimal(16,0)/DECIMAL_64, col 7:decimal(16,0)/DECIMAL_64 Statistics: Num rows: 6105 Data size: 4267312 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 (type: decimal(26,0)), _col9 (type: bigint) Execution mode: vectorized, llap 
@@ -547,14 +549,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 10 - dataColumns: KEY._col0:int, VALUE._col0:bigint, VALUE._col1:decimal(11,5), VALUE._col2:decimal(11,5), VALUE._col3:decimal(21,5), VALUE._col4:bigint, VALUE._col5:decimal(16,0), VALUE._col6:decimal(16,0), VALUE._col7:decimal(26,0), VALUE._col8:bigint + dataColumns: KEY._col0:int, VALUE._col0:bigint/DECIMAL_64, VALUE._col1:decimal(11,5)/DECIMAL_64, VALUE._col2:decimal(11,5)/DECIMAL_64, VALUE._col3:decimal(21,5)/DECIMAL_64, VALUE._col4:bigint, VALUE._col5:decimal(16,0), VALUE._col6:decimal(16,0), VALUE._col7:decimal(26,0), VALUE._col8:bigint partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), max(VALUE._col5), min(VALUE._col6), sum(VALUE._col7), count(VALUE._col8) Group By Vectorization: - aggregators: VectorUDAFCountMerge(col 1:bigint) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 3:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 4:decimal(21,5)) -> decimal(21,5), VectorUDAFCountMerge(col 5:bigint) -> bigint, VectorUDAFMaxDecimal(col 6:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 7:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 8:decimal(26,0)) -> decimal(26,0), VectorUDAFCountMerge(col 9:bigint) -> bigint + aggregators: VectorUDAFCountMerge(col 1:bigint/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 3:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 4:decimal(21,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFCountMerge(col 5:bigint) -> bigint, VectorUDAFMaxDecimal(col 6:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 7:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 8:decimal(26,0)) -> decimal(26,0), VectorUDAFCountMerge(col 9:bigint) -> bigint className: VectorGroupByOperator groupByMode: MERGEPARTIAL keyExpressions: col 0:int @@ -579,6 +581,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] + selectExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64 Statistics: Num rows: 2035 Data size: 1406160 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false @@ -693,7 +696,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 1, 2, 6, 11, 13, 18] - selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double + selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double Statistics: Num rows: 12289 Data size: 2091336 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count() @@ -721,6 +724,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:double, 6:double, 7:bigint, 8:decimal(16,0), 9:decimal(16,0), 10:decimal(26,0), 11:double, 12:double, 13:bigint + valueExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64, col 8:decimal(16,0)/DECIMAL_64, col 9:decimal(16,0)/DECIMAL_64 Statistics: Num rows: 6105 Data size: 4462672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint) Execution mode: vectorized, llap @@ -752,14 +756,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 14 - dataColumns: KEY._col0:int, VALUE._col0:bigint, VALUE._col1:decimal(11,5), VALUE._col2:decimal(11,5), VALUE._col3:decimal(21,5), VALUE._col4:double, VALUE._col5:double, VALUE._col6:bigint, VALUE._col7:decimal(16,0), VALUE._col8:decimal(16,0), VALUE._col9:decimal(26,0), VALUE._col10:double, VALUE._col11:double, VALUE._col12:bigint + dataColumns: KEY._col0:int, VALUE._col0:bigint/DECIMAL_64, VALUE._col1:decimal(11,5)/DECIMAL_64, VALUE._col2:decimal(11,5)/DECIMAL_64, VALUE._col3:decimal(21,5)/DECIMAL_64, VALUE._col4:double, VALUE._col5:double, VALUE._col6:bigint, VALUE._col7:decimal(16,0), VALUE._col8:decimal(16,0), VALUE._col9:decimal(26,0), VALUE._col10:double, VALUE._col11:double, VALUE._col12:bigint partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0), max(VALUE._col1), 
min(VALUE._col2), sum(VALUE._col3), sum(VALUE._col4), sum(VALUE._col5), count(VALUE._col6), max(VALUE._col7), min(VALUE._col8), sum(VALUE._col9), sum(VALUE._col10), sum(VALUE._col11), count(VALUE._col12) Group By Vectorization: - aggregators: VectorUDAFCountMerge(col 1:bigint) -> bigint, VectorUDAFMaxDecimal(col 2:decimal(11,5)) -> decimal(11,5), VectorUDAFMinDecimal(col 3:decimal(11,5)) -> decimal(11,5), VectorUDAFSumDecimal(col 4:decimal(21,5)) -> decimal(21,5), VectorUDAFSumDouble(col 5:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCountMerge(col 7:bigint) -> bigint, VectorUDAFMaxDecimal(col 8:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 9:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 10:decimal(26,0)) -> decimal(26,0), VectorUDAFSumDouble(col 11:double) -> double, VectorUDAFSumDouble(col 12:double) -> double, VectorUDAFCountMerge(col 13:bigint) -> bigint + aggregators: VectorUDAFCountMerge(col 1:bigint/DECIMAL_64) -> bigint, VectorUDAFMaxDecimal64(col 2:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFMinDecimal64(col 3:decimal(11,5)/DECIMAL_64) -> decimal(11,5)/DECIMAL_64, VectorUDAFSumDecimal64ToDecimal(col 4:decimal(21,5)/DECIMAL_64) -> decimal(21,5), VectorUDAFSumDouble(col 5:double) -> double, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFCountMerge(col 7:bigint) -> bigint, VectorUDAFMaxDecimal(col 8:decimal(16,0)) -> decimal(16,0), VectorUDAFMinDecimal(col 9:decimal(16,0)) -> decimal(16,0), VectorUDAFSumDecimal(col 10:decimal(26,0)) -> decimal(26,0), VectorUDAFSumDouble(col 11:double) -> double, VectorUDAFSumDouble(col 12:double) -> double, VectorUDAFCountMerge(col 13:bigint) -> bigint className: VectorGroupByOperator groupByMode: MERGEPARTIAL keyExpressions: col 0:int @@ -784,7 +788,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 16, 21, 29, 7, 8, 9, 10, 32, 37, 45] - selectExpressions: CastDecimalToDecimal(col 15:decimal(38,22))(children: DecimalColDivideDecimalColumn(col 4:decimal(21,5), col 14:decimal(19,0))(children: CastLongToDecimal(col 1:bigint) -> 14:decimal(19,0)) -> 15:decimal(38,22)) -> 16:decimal(15,9), FuncPowerDoubleToDouble(col 20:double)(children: DoubleColDivideLongColumn(col 19:double, col 1:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 18:double)(children: DoubleColDivideLongColumn(col 17:double, col 1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) -> 17:double) -> 18:double) -> 19:double) -> 20:double) -> 21:double, FuncPowerDoubleToDouble(col 28:double)(children: DoubleColDivideLongColumn(col 24:double, col 27:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 23:double)(children: DoubleColDivideLongColumn(col 22:double, col 1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) -> 22:double) -> 23:double) -> 24:double, IfExprNullCondExpr(col 25:boolean, null, col 26:bigint)(children: LongColEqualLongScalar(col 1:bigint, val 1) -> 25:boolean, LongColSubtractLongScalar(col 1:bigint, val 1) -> 26:bigint) -> 27:bigint) -> 28:double) -> 29:double, CastDecimalToDecimal(col 31:decimal(38,12))(children: DecimalColDivideDecimalColumn(col 10:decimal(26,0), col 30:decimal(19,0))(children: CastLongToDecimal(col 7:bigint) -> 30:decimal(19,0)) -> 31:decimal(38,12)) -> 32:decimal(20,4), FuncPowerDoubleToDouble(col 36:double)(children: DoubleColDivideLongColumn(col 35:double, col 7:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col 
34:double)(children: DoubleColDivideLongColumn(col 33:double, col 7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) -> 33:double) -> 34:double) -> 35:double) -> 36:double) -> 37:double, FuncPowerDoubleToDouble(col 44:double)(children: DoubleColDivideLongColumn(col 40:double, col 43:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col 39:double)(children: DoubleColDivideLongColumn(col 38:double, col 7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) -> 38:double) -> 39:double) -> 40:double, IfExprNullCondExpr(col 41:boolean, null, col 42:bigint)(children: LongColEqualLongScalar(col 7:bigint, val 1) -> 41:boolean, LongColSubtractLongScalar(col 7:bigint, val 1) -> 42:bigint) -> 43:bigint) -> 44:double) -> 45:double + selectExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64, CastDecimalToDecimal(col 15:decimal(38,22))(children: DecimalColDivideDecimalColumn(col 4:decimal(21,5), col 14:decimal(19,0))(children: CastLongToDecimal(col 1:bigint) -> 14:decimal(19,0)) -> 15:decimal(38,22)) -> 16:decimal(15,9), FuncPowerDoubleToDouble(col 20:double)(children: DoubleColDivideLongColumn(col 19:double, col 1:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 18:double)(children: DoubleColDivideLongColumn(col 17:double, col 1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) -> 17:double) -> 18:double) -> 19:double) -> 20:double) -> 21:double, FuncPowerDoubleToDouble(col 28:double)(children: DoubleColDivideLongColumn(col 24:double, col 27:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 23:double)(children: DoubleColDivideLongColumn(col 22:double, col 1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) -> 22:double) -> 23:double) -> 24:double, IfExprNullCondExpr(col 25:boolean, null, col 26:bigint)(children: LongColEqualLongScalar(col 1:bigint, val 1) -> 25:boolean, LongColSubtractLongScalar(col 1:bigint, val 1) -> 26:bigint) -> 27:bigint) -> 28:double) -> 29:double, CastDecimalToDecimal(col 31:decimal(38,12))(children: DecimalColDivideDecimalColumn(col 10:decimal(26,0), col 30:decimal(19,0))(children: CastLongToDecimal(col 7:bigint) -> 30:decimal(19,0)) -> 31:decimal(38,12)) -> 32:decimal(20,4), FuncPowerDoubleToDouble(col 36:double)(children: DoubleColDivideLongColumn(col 35:double, col 7:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col 34:double)(children: DoubleColDivideLongColumn(col 33:double, col 7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) -> 33:double) -> 34:double) -> 35:double) -> 36:double) -> 37:double, FuncPowerDoubleToDouble(col 44:double)(children: DoubleColDivideLongColumn(col 40:double, col 43:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col 39:double)(children: DoubleColDivideLongColumn(col 38:double, col 7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) -> 38:double) -> 39:double) -> 40:double, IfExprNullCondExpr(col 41:boolean, null, col 42:bigint)(children: LongColEqualLongScalar(col 7:bigint, val 1) -> 41:boolean, LongColSubtractLongScalar(col 7:bigint, val 1) -> 42:bigint) -> 43:bigint) -> 44:double) -> 45:double Statistics: Num rows: 2035 Data size: 1927120 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out index 
a2aeb20f0e..ad25053c6d 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out @@ -64,6 +64,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -106,6 +107,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -189,14 +191,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(17,2) + dataColumns: VALUE._col0:decimal(17,2)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + aggregators: VectorUDAFSumDecimal64(col 0:decimal(17,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out index c40d787a62..6c31d7fe49 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out @@ -774,6 +774,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(14,2)/DECIMAL_64 Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -846,6 +847,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(14,0)/DECIMAL_64 Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(16,2)) @@ -1060,6 +1062,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(14,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64 Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -1133,6 +1136,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(14,0)/DECIMAL_64, col 1:decimal(14,0)/DECIMAL_64 Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(16,2)) @@ -1145,6 +1149,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(14,0) + valueExpressions: col 1:decimal(14,0)/DECIMAL_64 Statistics: 
Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(14,0)) Execution mode: vectorized, llap diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out index 648eb83c34..79d1ba01ce 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out @@ -389,7 +389,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2, 10, 12, 14, 16, 20, 23, 26, 29, 32, 35, 39, 43, 46, 48, 51, 54, 57, 60, 63, 66, 69, 2, 71, 73, 80] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 9:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 9:decimal(12,4)) -> 10:decimal(11,2), FuncRoundDecimalToDecimal(col 11:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 11:decimal(12,4)) -> 12:decimal(9,0), FuncFloorDecimalToDecimal(col 13:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 13:decimal(12,4)) -> 14:decimal(9,0), FuncCeilDecimalToDecimal(col 15:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 15:decimal(12,4)) -> 16:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 19, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 18:double)(children: CastDecimalToDouble(col 17:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 17:decimal(12,4)) -> 18:double) -> 19:double) -> 20:double, FuncLnDoubleToDouble(col 22:double)(children: CastDecimalToDouble(col 21:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 21:decimal(12,4)) -> 22:double) -> 23:double, FuncLog10DoubleToDouble(col 25:double)(children: CastDecimalToDouble(col 24:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 24:decimal(12,4)) -> 25:double) -> 26:double, FuncLog2DoubleToDouble(col 28:double)(children: CastDecimalToDouble(col 27:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 27:decimal(12,4)) -> 28:double) -> 29:double, FuncLog2DoubleToDouble(col 31:double)(children: CastDecimalToDouble(col 81:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 30:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 30:decimal(13,4)/DECIMAL_64) -> 81:decimal(13,4)) -> 31:double) -> 32:double, FuncLogWithBaseDoubleToDouble(col 34:double)(children: CastDecimalToDouble(col 33:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 33:decimal(12,4)) -> 34:double) -> 35:double, FuncPowerDoubleToDouble(col 38:double)(children: FuncLog2DoubleToDouble(col 37:double)(children: CastDecimalToDouble(col 36:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 36:decimal(12,4)) -> 37:double) -> 38:double) -> 39:double, FuncPowerDoubleToDouble(col 42:double)(children: FuncLog2DoubleToDouble(col 41:double)(children: CastDecimalToDouble(col 40:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 40:decimal(12,4)) -> 41:double) -> 42:double) -> 43:double, FuncSqrtDoubleToDouble(col 45:double)(children: CastDecimalToDouble(col 44:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 
2:decimal(12,4)/DECIMAL_64) -> 44:decimal(12,4)) -> 45:double) -> 46:double, FuncAbsDecimalToDecimal(col 47:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 47:decimal(12,4)) -> 48:decimal(12,4), FuncSinDoubleToDouble(col 50:double)(children: CastDecimalToDouble(col 49:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 49:decimal(12,4)) -> 50:double) -> 51:double, FuncASinDoubleToDouble(col 53:double)(children: CastDecimalToDouble(col 52:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 52:decimal(12,4)) -> 53:double) -> 54:double, FuncCosDoubleToDouble(col 56:double)(children: CastDecimalToDouble(col 55:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 55:decimal(12,4)) -> 56:double) -> 57:double, FuncACosDoubleToDouble(col 59:double)(children: CastDecimalToDouble(col 58:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 58:decimal(12,4)) -> 59:double) -> 60:double, FuncATanDoubleToDouble(col 62:double)(children: CastDecimalToDouble(col 61:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 61:decimal(12,4)) -> 62:double) -> 63:double, FuncDegreesDoubleToDouble(col 65:double)(children: CastDecimalToDouble(col 64:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 64:decimal(12,4)) -> 65:double) -> 66:double, FuncRadiansDoubleToDouble(col 68:double)(children: CastDecimalToDouble(col 67:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 67:decimal(12,4)) -> 68:double) -> 69:double, FuncNegateDecimalToDecimal(col 70:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 70:decimal(12,4)) -> 71:decimal(12,4), FuncSignDecimalToLong(col 72:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 72:decimal(12,4)) -> 73:int, FuncCosDoubleToDouble(col 79:double)(children: DoubleColAddDoubleScalar(col 78:double, val 3.14159)(children: DoubleColUnaryMinus(col 77:double)(children: FuncSinDoubleToDouble(col 76:double)(children: FuncLnDoubleToDouble(col 75:double)(children: CastDecimalToDouble(col 74:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 74:decimal(12,4)) -> 75:double) -> 76:double) -> 77:double) -> 78:double) -> 79:double) -> 80:double + selectExpressions: col 2:decimal(12,4)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 9:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 9:decimal(12,4)) -> 10:decimal(11,2), FuncRoundDecimalToDecimal(col 11:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 11:decimal(12,4)) -> 12:decimal(9,0), FuncFloorDecimalToDecimal(col 13:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 13:decimal(12,4)) -> 14:decimal(9,0), FuncCeilDecimalToDecimal(col 15:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 15:decimal(12,4)) -> 16:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 19, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 18:double)(children: CastDecimalToDouble(col 17:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 17:decimal(12,4)) -> 18:double) -> 19:double) -> 20:double, FuncLnDoubleToDouble(col 22:double)(children: CastDecimalToDouble(col 21:decimal(12,4))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 21:decimal(12,4)) -> 22:double) -> 23:double, FuncLog10DoubleToDouble(col 25:double)(children: CastDecimalToDouble(col 24:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 24:decimal(12,4)) -> 25:double) -> 26:double, FuncLog2DoubleToDouble(col 28:double)(children: CastDecimalToDouble(col 27:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 27:decimal(12,4)) -> 28:double) -> 29:double, FuncLog2DoubleToDouble(col 31:double)(children: CastDecimalToDouble(col 81:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 30:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 30:decimal(13,4)/DECIMAL_64) -> 81:decimal(13,4)) -> 31:double) -> 32:double, FuncLogWithBaseDoubleToDouble(col 34:double)(children: CastDecimalToDouble(col 33:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 33:decimal(12,4)) -> 34:double) -> 35:double, FuncPowerDoubleToDouble(col 38:double)(children: FuncLog2DoubleToDouble(col 37:double)(children: CastDecimalToDouble(col 36:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 36:decimal(12,4)) -> 37:double) -> 38:double) -> 39:double, FuncPowerDoubleToDouble(col 42:double)(children: FuncLog2DoubleToDouble(col 41:double)(children: CastDecimalToDouble(col 40:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 40:decimal(12,4)) -> 41:double) -> 42:double) -> 43:double, FuncSqrtDoubleToDouble(col 45:double)(children: CastDecimalToDouble(col 44:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 44:decimal(12,4)) -> 45:double) -> 46:double, FuncAbsDecimalToDecimal(col 47:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 47:decimal(12,4)) -> 48:decimal(12,4), FuncSinDoubleToDouble(col 50:double)(children: CastDecimalToDouble(col 49:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 49:decimal(12,4)) -> 50:double) -> 51:double, FuncASinDoubleToDouble(col 53:double)(children: CastDecimalToDouble(col 52:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 52:decimal(12,4)) -> 53:double) -> 54:double, FuncCosDoubleToDouble(col 56:double)(children: CastDecimalToDouble(col 55:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 55:decimal(12,4)) -> 56:double) -> 57:double, FuncACosDoubleToDouble(col 59:double)(children: CastDecimalToDouble(col 58:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 58:decimal(12,4)) -> 59:double) -> 60:double, FuncATanDoubleToDouble(col 62:double)(children: CastDecimalToDouble(col 61:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 61:decimal(12,4)) -> 62:double) -> 63:double, FuncDegreesDoubleToDouble(col 65:double)(children: CastDecimalToDouble(col 64:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 64:decimal(12,4)) -> 65:double) -> 66:double, FuncRadiansDoubleToDouble(col 68:double)(children: CastDecimalToDouble(col 67:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 67:decimal(12,4)) -> 68:double) -> 69:double, col 2:decimal(12,4)/DECIMAL_64, FuncNegateDecimalToDecimal(col 70:decimal(12,4))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 70:decimal(12,4)) -> 71:decimal(12,4), FuncSignDecimalToLong(col 72:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 72:decimal(12,4)) -> 73:int, FuncCosDoubleToDouble(col 79:double)(children: DoubleColAddDoubleScalar(col 78:double, val 3.14159)(children: DoubleColUnaryMinus(col 77:double)(children: FuncSinDoubleToDouble(col 76:double)(children: FuncLnDoubleToDouble(col 75:double)(children: CastDecimalToDouble(col 74:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 74:decimal(12,4)) -> 75:double) -> 76:double) -> 77:double) -> 78:double) -> 79:double) -> 80:double Statistics: Num rows: 2048 Data size: 2005696 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 8dd8d6196c..1ae3de7978 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -71,7 +71,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 3] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) + selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -193,7 +193,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 3] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) + selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) @@ -205,6 +205,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(10,0) + valueExpressions: col 0:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap @@ -236,7 +237,7 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:decimal(11,0), VALUE._col0:decimal(10,0) + dataColumns: KEY.reducesinkkey0:decimal(11,0), VALUE._col0:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: @@ -247,6 +248,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1, 
0] + selectExpressions: col 1:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false @@ -615,7 +617,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 3] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) + selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) @@ -737,7 +739,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 3] - selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) + selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: decimal(11,0)) @@ -749,6 +751,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(10,0) + valueExpressions: col 0:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap @@ -780,7 +783,7 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:decimal(11,0), VALUE._col0:decimal(10,0) + dataColumns: KEY.reducesinkkey0:decimal(11,0), VALUE._col0:decimal(10,0)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: @@ -791,6 +794,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1, 0] + selectExpressions: col 1:decimal(10,0)/DECIMAL_64 Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index 0ec8c80d4d..5db8dcfa7f 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -106,6 +106,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2] + selectExpressions: col 1:decimal(10,4)/DECIMAL_64, col 2:decimal(15,8)/DECIMAL_64 Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) @@ -117,6 +118,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(10,4), 2:decimal(15,8) + valueExpressions: col 1:decimal(10,4)/DECIMAL_64, col 2:decimal(15,8)/DECIMAL_64 Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(10,4)), _col2 (type: decimal(15,8)) Execution mode: vectorized, llap @@ -148,7 +150,7 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 3 - dataColumns: KEY.reducesinkkey0:int, VALUE._col0:decimal(10,4), VALUE._col1:decimal(15,8) + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:decimal(10,4)/DECIMAL_64, VALUE._col1:decimal(15,8)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: @@ -159,6 +161,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2] + selectExpressions: col 1:decimal(10,4)/DECIMAL_64, col 2:decimal(15,8)/DECIMAL_64 Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index f54064c235..1cafa6420e 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -5318,6 +5318,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -6504,6 +6505,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(key), count(key) @@ -7479,7 +7481,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1, 0, 4, 9] - selectExpressions: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 6:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 5:decimal(15,3)) -> 6:double, CastDecimalToDouble(col 7:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 7:decimal(15,3)) -> 8:double) -> 9:double + selectExpressions: col 0:decimal(15,3)/DECIMAL_64, CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 6:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 5:decimal(15,3)) -> 6:double, CastDecimalToDouble(col 7:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 7:decimal(15,3)) -> 8:double) -> 9:double Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col3), sum(_col2), count(_col1) @@ -7649,7 +7651,7 @@ STAGE PLANS: className: VectorSelectOperator native: true 
projectedOutputColumnNums: [1, 0, 4, 9] - selectExpressions: CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 6:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 5:decimal(15,3)) -> 6:double, CastDecimalToDouble(col 7:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 7:decimal(15,3)) -> 8:double) -> 9:double + selectExpressions: col 0:decimal(15,3)/DECIMAL_64, CastDecimalToDouble(col 3:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 3:decimal(15,3)) -> 4:double, DoubleColMultiplyDoubleColumn(col 6:double, col 8:double)(children: CastDecimalToDouble(col 5:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 5:decimal(15,3)) -> 6:double, CastDecimalToDouble(col 7:decimal(15,3))(children: ConvertDecimal64ToDecimal(col 0:decimal(15,3)/DECIMAL_64) -> 7:decimal(15,3)) -> 8:double) -> 9:double Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col3), sum(_col2), count(_col1) @@ -7909,6 +7911,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: min(key) @@ -7931,6 +7934,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(15,3) + valueExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(15,3)) Execution mode: vectorized, llap @@ -7962,14 +7966,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(15,3) + dataColumns: VALUE._col0:decimal(15,3)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFMinDecimal(col 0:decimal(15,3)) -> decimal(15,3) + aggregators: VectorUDAFMinDecimal64(col 0:decimal(15,3)/DECIMAL_64) -> decimal(15,3)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false @@ -8045,6 +8049,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: max(key) @@ -8067,6 +8072,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(15,3) + valueExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(15,3)) Execution mode: vectorized, llap @@ -8098,14 +8104,14 @@ STAGE PLANS: vectorized: true 
rowBatchContext: dataColumnCount: 1 - dataColumns: VALUE._col0:decimal(15,3) + dataColumns: VALUE._col0:decimal(15,3)/DECIMAL_64 partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: max(VALUE._col0) Group By Vectorization: - aggregators: VectorUDAFMaxDecimal(col 0:decimal(15,3)) -> decimal(15,3) + aggregators: VectorUDAFMaxDecimal64(col 0:decimal(15,3)/DECIMAL_64) -> decimal(15,3)/DECIMAL_64 className: VectorGroupByOperator groupByMode: MERGEPARTIAL native: false @@ -8181,6 +8187,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,3)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(key) diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out index 3c79f3ea3a..236a48b780 100644 --- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out @@ -78,6 +78,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2] + selectExpressions: col 2:decimal(10,2)/DECIMAL_64 Statistics: Num rows: 12 Data size: 3384 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c_dec), count(c_dec), count() @@ -223,6 +224,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2] + selectExpressions: col 2:decimal(10,2)/DECIMAL_64 Statistics: Num rows: 12 Data size: 3384 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c_dec), count(c_dec), count() @@ -394,6 +396,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2] + selectExpressions: col 2:decimal(10,2)/DECIMAL_64 Statistics: Num rows: 12 Data size: 3384 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c_dec), count(c_dec), count() diff --git a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out index 94fa3cfb3a..f2c3a3dad1 100644 --- a/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out @@ -283,6 +283,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c1) @@ -506,6 +507,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64, col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c1) @@ -759,6 +761,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64, col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -771,6 +774,7 @@ STAGE PLANS: native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(15,2) + valueExpressions: col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -813,6 +817,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -1081,6 +1086,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -1133,6 +1139,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64, col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -1145,6 +1152,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(15,2) + valueExpressions: col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1402,6 +1410,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64, col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -1414,6 +1423,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(15,2) + valueExpressions: col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1456,6 +1466,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(15,2)/DECIMAL_64, col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(15,2)) @@ -1468,6 +1479,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true 
valueColumns: 1:decimal(15,2) + valueExpressions: col 1:decimal(15,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1633,6 +1645,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c1) @@ -1655,6 +1668,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(17,2) + valueExpressions: col 0:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -1856,6 +1870,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(c1) @@ -1883,6 +1898,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 2:decimal(17,2) + valueExpressions: col 2:decimal(17,2)/DECIMAL_64 Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -2109,6 +2125,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2121,6 +2138,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(7,2) + valueExpressions: col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2163,6 +2181,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2431,6 +2450,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2483,6 +2503,7 @@ STAGE PLANS: className: VectorSelectOperator native: true 
projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2495,6 +2516,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(7,2) + valueExpressions: col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2752,6 +2774,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2764,6 +2787,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(7,2) + valueExpressions: col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2806,6 +2830,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(7,2)) @@ -2818,6 +2843,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 1:decimal(7,2) + valueExpressions: col 1:decimal(7,2)/DECIMAL_64 Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap diff --git a/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out b/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out index 9b246d5b48..084d543fed 100644 --- a/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_string_decimal.q.out @@ -85,6 +85,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(18,0)/DECIMAL_64 Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out index 5410237e9d..5d73da4f67 100644 --- a/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out @@ -1097,6 
+1097,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 8, 9] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) @@ -1109,6 +1110,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 8:timestamp, 9:decimal(4,2) + valueExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: decimal(4,2)) Execution mode: vectorized, llap @@ -1407,6 +1409,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 8, 9] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) @@ -1419,6 +1422,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 8:timestamp, 9:decimal(4,2) + valueExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: decimal(4,2)) Execution mode: vectorized, llap @@ -1719,6 +1723,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3, 8, 9] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) @@ -1731,6 +1736,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 8:timestamp, 9:decimal(4,2) + valueExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: decimal(4,2)) Execution mode: vectorized, llap diff --git a/ql/src/test/results/clientpositive/llap/vectorized_decimal64_boundary.q.out b/ql/src/test/results/clientpositive/llap/vectorized_decimal64_boundary.q.out index 88a66ebe2b..c9ad0a5d7c 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_decimal64_boundary.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_decimal64_boundary.q.out @@ -137,6 +137,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 18:decimal(18,2) + valueExpressions: col 18:decimal(18,2)/DECIMAL_64 Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(18,2)) Execution mode: vectorized, 
llap diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out index 49709488e7..a44d4e0fd2 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out @@ -291,6 +291,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] + selectExpressions: col 1:decimal(10,1)/DECIMAL_64 Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,1)) @@ -343,6 +344,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] + selectExpressions: col 1:decimal(10,1)/DECIMAL_64 Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: decimal(10,1)) @@ -362,6 +364,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] + selectExpressions: col 1:decimal(10,1)/DECIMAL_64 Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: min(_col0), max(_col0), bloom_filter(_col0, expectedEntries=20) @@ -384,6 +387,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(10,1), 1:decimal(10,1), 2:binary + valueExpressions: col 0:decimal(10,1)/DECIMAL_64, col 1:decimal(10,1)/DECIMAL_64 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary) Execution mode: vectorized, llap @@ -477,14 +481,14 @@ STAGE PLANS: vectorized: true rowBatchContext: dataColumnCount: 3 - dataColumns: VALUE._col0:decimal(10,1), VALUE._col1:decimal(10,1), VALUE._col2:binary + dataColumns: VALUE._col0:decimal(10,1)/DECIMAL_64, VALUE._col1:decimal(10,1)/DECIMAL_64, VALUE._col2:binary partitionColumnCount: 0 scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0), max(VALUE._col1), bloom_filter(VALUE._col2, expectedEntries=20) Group By Vectorization: - aggregators: VectorUDAFMinDecimal(col 0:decimal(10,1)) -> decimal(10,1), VectorUDAFMaxDecimal(col 1:decimal(10,1)) -> decimal(10,1), VectorUDAFBloomFilterMerge(col 2:binary) -> binary + aggregators: VectorUDAFMinDecimal64(col 0:decimal(10,1)/DECIMAL_64) -> decimal(10,1)/DECIMAL_64, VectorUDAFMaxDecimal64(col 1:decimal(10,1)/DECIMAL_64) -> decimal(10,1)/DECIMAL_64, VectorUDAFBloomFilterMerge(col 2:binary) -> binary className: VectorGroupByOperator groupByMode: FINAL native: false @@ -501,6 +505,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true valueColumns: 0:decimal(10,1), 1:decimal(10,1), 2:binary + valueExpressions: col 0:decimal(10,1)/DECIMAL_64, col 1:decimal(10,1)/DECIMAL_64 Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value 
expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary) diff --git a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out index d81a2be366..21a711d9be 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out @@ -214,6 +214,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(8,1)/DECIMAL_64 Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -427,6 +428,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(8,1)/DECIMAL_64 Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -640,6 +642,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1] + selectExpressions: col 0:decimal(8,1)/DECIMAL_64 Statistics: Num rows: 5 Data size: 580 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/mapjoin_decimal_vectorized.q.out b/ql/src/test/results/clientpositive/mapjoin_decimal_vectorized.q.out index 819b3f251b..49e72d6b60 100644 --- a/ql/src/test/results/clientpositive/mapjoin_decimal_vectorized.q.out +++ b/ql/src/test/results/clientpositive/mapjoin_decimal_vectorized.q.out @@ -140,6 +140,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1049 Data size: 117488 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: @@ -508,6 +509,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] + selectExpressions: col 0:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1049 Data size: 117488 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: @@ -682,7 +684,7 @@ STAGE PLANS: includeColumns: [0] dataColumns: dec:decimal(4,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(6,2), decimal(4,0)] + scratchColumnTypeNames: [decimal(6,2), decimal(4,0)/DECIMAL_64] Local Work: Map Reduce Local Work Reduce Vectorization: @@ -1046,7 +1048,7 @@ STAGE PLANS: includeColumns: [0] dataColumns: dec:decimal(4,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(6,2), decimal(4,0)] + scratchColumnTypeNames: [decimal(6,2), decimal(4,0)/DECIMAL_64] Local Work: Map Reduce Local Work Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out index f54fb8aad5..914767f2f8 100644 --- a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out @@ -248,6 +248,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: col 9:decimal(4,2)/DECIMAL_64 Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int) @@ -257,6 +258,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true 
diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
index fb07d9ddbb..5c556da47e 100644
--- a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
@@ -484,6 +484,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [1, 2, 3]
+            selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64
           Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count()
@@ -511,6 +512,7 @@
             native: true
             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
             valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:bigint, 6:decimal(16,0), 7:decimal(16,0), 8:decimal(26,0), 9:bigint
+            valueExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64, col 6:decimal(16,0)/DECIMAL_64, col 7:decimal(16,0)/DECIMAL_64
           Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE
           value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 (type: decimal(26,0)), _col9 (type: bigint)
         Execution mode: vectorized
@@ -686,7 +688,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [3, 1, 2, 6, 11, 13, 18]
-            selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double
+            selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double
           Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count()
@@ -714,6 +716,7 @@
             native: true
             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
             valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:double, 6:double, 7:bigint, 8:decimal(16,0), 9:decimal(16,0), 10:decimal(26,0), 11:double, 12:double, 13:bigint
+            valueExpressions: col 2:decimal(11,5)/DECIMAL_64, col 3:decimal(11,5)/DECIMAL_64, col 8:decimal(16,0)/DECIMAL_64, col 9:decimal(16,0)/DECIMAL_64
           Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE
           value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint)
         Execution mode: vectorized
diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
index aa6573f5e0..75bdebb014 100644
--- a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
@@ -772,6 +772,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0]
+            selectExpressions: col 0:decimal(14,0)/DECIMAL_64
           Statistics: Num rows: 1049 Data size: 4966 Basic stats: COMPLETE Column stats: NONE
           Spark HashTable Sink Operator
             Spark Hash Table Sink Vectorization:
@@ -826,6 +827,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0]
+            selectExpressions: col 0:decimal(14,2)/DECIMAL_64
           Statistics: Num rows: 1049 Data size: 11234 Basic stats: COMPLETE Column stats: NONE
           Map Join Operator
             condition map:
@@ -1059,6 +1061,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1]
+            selectExpressions: col 0:decimal(14,0)/DECIMAL_64, col 1:decimal(14,0)/DECIMAL_64
           Statistics: Num rows: 1049 Data size: 4966 Basic stats: COMPLETE Column stats: NONE
           Spark HashTable Sink Operator
             Spark Hash Table Sink Vectorization:
@@ -1113,6 +1116,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1]
+            selectExpressions: col 0:decimal(14,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64
           Statistics: Num rows: 1049 Data size: 11234 Basic stats: COMPLETE Column stats: NONE
           Map Join Operator
             condition map:
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index b07f75dc00..35bbcd82ea 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -166,6 +166,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+            selectExpressions: col 9:decimal(4,2)/DECIMAL_64
           Statistics: Num rows: 100 Data size: 34084 Basic stats: COMPLETE Column stats: COMPLETE
           Map Join Operator
             condition map:
diff --git a/ql/src/test/results/clientpositive/vector_data_types.q.out b/ql/src/test/results/clientpositive/vector_data_types.q.out
index a6db4db157..2ed096c109 100644
--- a/ql/src/test/results/clientpositive/vector_data_types.q.out
+++ b/ql/src/test/results/clientpositive/vector_data_types.q.out
@@ -237,6 +237,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+            selectExpressions: col 9:decimal(4,2)/DECIMAL_64
           Statistics: Num rows: 1050 Data size: 358026 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
diff --git a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
index 83847be7b2..a2af4b2669 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
@@ -68,6 +68,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0]
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64
           Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(10,0))
@@ -163,6 +164,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0]
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64
           Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(10,0))
diff --git a/ql/src/test/results/clientpositive/vector_decimal_6.q.out b/ql/src/test/results/clientpositive/vector_decimal_6.q.out
index c956429110..3bd1e6dd1a 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_6.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_6.q.out
@@ -141,6 +141,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1]
+            selectExpressions: col 0:decimal(10,5)/DECIMAL_64
           Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(10,5)), _col1 (type: int)
@@ -261,6 +262,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1]
+            selectExpressions: col 0:decimal(17,4)/DECIMAL_64
           Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(17,4)), _col1 (type: int)
diff --git a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index 5379f47cea..89d336324b 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -406,6 +406,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [1, 2, 3]
+            selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64
           Statistics: Num rows: 12289 Data size: 2091336 Basic stats: COMPLETE Column stats: COMPLETE
           Group By Operator
             aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count()
@@ -572,7 +573,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [3, 1, 2, 6, 11, 13, 18]
-            selectExpressions: CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double
+            selectExpressions: col 1:decimal(11,5)/DECIMAL_64, col 2:decimal(16,0)/DECIMAL_64, CastDecimalToDouble(col 5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, DoubleColMultiplyDoubleColumn(col 8:double, col 10:double)(children: CastDecimalToDouble(col 7:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 7:decimal(11,5)) -> 8:double, CastDecimalToDouble(col 9:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 9:decimal(11,5)) -> 10:double) -> 11:double, CastDecimalToDouble(col 12:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 12:decimal(16,0)) -> 13:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastDecimalToDouble(col 14:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 14:decimal(16,0)) -> 15:double, CastDecimalToDouble(col 16:decimal(16,0))(children: ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 16:decimal(16,0)) -> 17:double) -> 18:double
           Statistics: Num rows: 12289 Data size: 2091336 Basic stats: COMPLETE Column stats: COMPLETE
           Group By Operator
             aggregations: count(_col1), max(_col1), min(_col1), sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), sum(_col2), sum(_col6), sum(_col5), count()
diff --git a/ql/src/test/results/clientpositive/vector_decimal_join.q.out b/ql/src/test/results/clientpositive/vector_decimal_join.q.out
index 872e2ceea9..3e88b9f1f4 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_join.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_join.q.out
@@ -135,7 +135,7 @@ STAGE PLANS:
             includeColumns: [0]
             dataColumns: x:int, y:decimal(7,2)/DECIMAL_64
             partitionColumnCount: 0
-            scratchColumnTypeNames: [decimal(7,2)]
+            scratchColumnTypeNames: [decimal(7,2)/DECIMAL_64]
        Local Work:
           Map Reduce Local Work
         Reduce Vectorization:
diff --git a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
index d26ca931ec..dcc3bd819f 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
@@ -716,6 +716,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0]
+            selectExpressions: col 0:decimal(14,2)/DECIMAL_64
           Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE
           Map Join Operator
             condition map:
@@ -964,6 +965,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1]
+            selectExpressions: col 0:decimal(14,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64
           Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE
           Map Join Operator
             condition map:
diff --git a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
index 599e12dd7f..fb7ea5415e 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
@@ -382,7 +382,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [2, 10, 12, 14, 16, 20, 23, 26, 29, 32, 35, 39, 43, 46, 48, 51, 54, 57, 60, 63, 66, 69, 2, 71, 73, 80]
-            selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 9:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 9:decimal(12,4)) -> 10:decimal(11,2), FuncRoundDecimalToDecimal(col 11:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 11:decimal(12,4)) -> 12:decimal(9,0), FuncFloorDecimalToDecimal(col 13:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 13:decimal(12,4)) -> 14:decimal(9,0), FuncCeilDecimalToDecimal(col 15:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 15:decimal(12,4)) -> 16:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 19, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 18:double)(children: CastDecimalToDouble(col 17:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 17:decimal(12,4)) -> 18:double) -> 19:double) -> 20:double, FuncLnDoubleToDouble(col 22:double)(children: CastDecimalToDouble(col 21:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 21:decimal(12,4)) -> 22:double) -> 23:double, FuncLog10DoubleToDouble(col 25:double)(children: CastDecimalToDouble(col 24:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 24:decimal(12,4)) -> 25:double) -> 26:double, FuncLog2DoubleToDouble(col 28:double)(children: CastDecimalToDouble(col 27:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 27:decimal(12,4)) -> 28:double) -> 29:double, FuncLog2DoubleToDouble(col 31:double)(children: CastDecimalToDouble(col 81:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 30:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 30:decimal(13,4)/DECIMAL_64) -> 81:decimal(13,4)) -> 31:double) -> 32:double, FuncLogWithBaseDoubleToDouble(col 34:double)(children: CastDecimalToDouble(col 33:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 33:decimal(12,4)) -> 34:double) -> 35:double, FuncPowerDoubleToDouble(col 38:double)(children: FuncLog2DoubleToDouble(col 37:double)(children: CastDecimalToDouble(col 36:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 36:decimal(12,4)) -> 37:double) -> 38:double) -> 39:double, FuncPowerDoubleToDouble(col 42:double)(children: FuncLog2DoubleToDouble(col 41:double)(children: CastDecimalToDouble(col 40:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 40:decimal(12,4)) -> 41:double) -> 42:double) -> 43:double, FuncSqrtDoubleToDouble(col 45:double)(children: CastDecimalToDouble(col 44:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 44:decimal(12,4)) -> 45:double) -> 46:double, FuncAbsDecimalToDecimal(col 47:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 47:decimal(12,4)) -> 48:decimal(12,4), FuncSinDoubleToDouble(col 50:double)(children: CastDecimalToDouble(col 49:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 49:decimal(12,4)) -> 50:double) -> 51:double, FuncASinDoubleToDouble(col 53:double)(children: CastDecimalToDouble(col 52:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 52:decimal(12,4)) -> 53:double) -> 54:double, FuncCosDoubleToDouble(col 56:double)(children: CastDecimalToDouble(col 55:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 55:decimal(12,4)) -> 56:double) -> 57:double, FuncACosDoubleToDouble(col 59:double)(children: CastDecimalToDouble(col 58:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 58:decimal(12,4)) -> 59:double) -> 60:double, FuncATanDoubleToDouble(col 62:double)(children: CastDecimalToDouble(col 61:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 61:decimal(12,4)) -> 62:double) -> 63:double, FuncDegreesDoubleToDouble(col 65:double)(children: CastDecimalToDouble(col 64:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 64:decimal(12,4)) -> 65:double) -> 66:double, FuncRadiansDoubleToDouble(col 68:double)(children: CastDecimalToDouble(col 67:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 67:decimal(12,4)) -> 68:double) -> 69:double, FuncNegateDecimalToDecimal(col 70:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 70:decimal(12,4)) -> 71:decimal(12,4), FuncSignDecimalToLong(col 72:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 72:decimal(12,4)) -> 73:int, FuncCosDoubleToDouble(col 79:double)(children: DoubleColAddDoubleScalar(col 78:double, val 3.14159)(children: DoubleColUnaryMinus(col 77:double)(children: FuncSinDoubleToDouble(col 76:double)(children: FuncLnDoubleToDouble(col 75:double)(children: CastDecimalToDouble(col 74:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 74:decimal(12,4)) -> 75:double) -> 76:double) -> 77:double) -> 78:double) -> 79:double) -> 80:double
+            selectExpressions: col 2:decimal(12,4)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 9:decimal(12,4), decimalPlaces 2)(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 9:decimal(12,4)) -> 10:decimal(11,2), FuncRoundDecimalToDecimal(col 11:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 11:decimal(12,4)) -> 12:decimal(9,0), FuncFloorDecimalToDecimal(col 13:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 13:decimal(12,4)) -> 14:decimal(9,0), FuncCeilDecimalToDecimal(col 15:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 15:decimal(12,4)) -> 16:decimal(9,0), RoundWithNumDigitsDoubleToDouble(col 19, decimalPlaces 58)(children: FuncExpDoubleToDouble(col 18:double)(children: CastDecimalToDouble(col 17:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 17:decimal(12,4)) -> 18:double) -> 19:double) -> 20:double, FuncLnDoubleToDouble(col 22:double)(children: CastDecimalToDouble(col 21:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 21:decimal(12,4)) -> 22:double) -> 23:double, FuncLog10DoubleToDouble(col 25:double)(children: CastDecimalToDouble(col 24:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 24:decimal(12,4)) -> 25:double) -> 26:double, FuncLog2DoubleToDouble(col 28:double)(children: CastDecimalToDouble(col 27:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 27:decimal(12,4)) -> 28:double) -> 29:double, FuncLog2DoubleToDouble(col 31:double)(children: CastDecimalToDouble(col 81:decimal(13,4))(children: ConvertDecimal64ToDecimal(col 30:decimal(13,4)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Scalar(col 2:decimal(12,4)/DECIMAL_64, decimal64Val 156010000, decimalVal 15601) -> 30:decimal(13,4)/DECIMAL_64) -> 81:decimal(13,4)) -> 31:double) -> 32:double, FuncLogWithBaseDoubleToDouble(col 34:double)(children: CastDecimalToDouble(col 33:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 33:decimal(12,4)) -> 34:double) -> 35:double, FuncPowerDoubleToDouble(col 38:double)(children: FuncLog2DoubleToDouble(col 37:double)(children: CastDecimalToDouble(col 36:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 36:decimal(12,4)) -> 37:double) -> 38:double) -> 39:double, FuncPowerDoubleToDouble(col 42:double)(children: FuncLog2DoubleToDouble(col 41:double)(children: CastDecimalToDouble(col 40:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 40:decimal(12,4)) -> 41:double) -> 42:double) -> 43:double, FuncSqrtDoubleToDouble(col 45:double)(children: CastDecimalToDouble(col 44:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 44:decimal(12,4)) -> 45:double) -> 46:double, FuncAbsDecimalToDecimal(col 47:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 47:decimal(12,4)) -> 48:decimal(12,4), FuncSinDoubleToDouble(col 50:double)(children: CastDecimalToDouble(col 49:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 49:decimal(12,4)) -> 50:double) -> 51:double, FuncASinDoubleToDouble(col 53:double)(children: CastDecimalToDouble(col 52:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 52:decimal(12,4)) -> 53:double) -> 54:double, FuncCosDoubleToDouble(col 56:double)(children: CastDecimalToDouble(col 55:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 55:decimal(12,4)) -> 56:double) -> 57:double, FuncACosDoubleToDouble(col 59:double)(children: CastDecimalToDouble(col 58:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 58:decimal(12,4)) -> 59:double) -> 60:double, FuncATanDoubleToDouble(col 62:double)(children: CastDecimalToDouble(col 61:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 61:decimal(12,4)) -> 62:double) -> 63:double, FuncDegreesDoubleToDouble(col 65:double)(children: CastDecimalToDouble(col 64:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 64:decimal(12,4)) -> 65:double) -> 66:double, FuncRadiansDoubleToDouble(col 68:double)(children: CastDecimalToDouble(col 67:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 67:decimal(12,4)) -> 68:double) -> 69:double, col 2:decimal(12,4)/DECIMAL_64, FuncNegateDecimalToDecimal(col 70:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 70:decimal(12,4)) -> 71:decimal(12,4), FuncSignDecimalToLong(col 72:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 72:decimal(12,4)) -> 73:int, FuncCosDoubleToDouble(col 79:double)(children: DoubleColAddDoubleScalar(col 78:double, val 3.14159)(children: DoubleColUnaryMinus(col 77:double)(children: FuncSinDoubleToDouble(col 76:double)(children: FuncLnDoubleToDouble(col 75:double)(children: CastDecimalToDouble(col 74:decimal(12,4))(children: ConvertDecimal64ToDecimal(col 2:decimal(12,4)/DECIMAL_64) -> 74:decimal(12,4)) -> 75:double) -> 76:double) -> 77:double) -> 78:double) -> 79:double) -> 80:double
           Statistics: Num rows: 2048 Data size: 2005696 Basic stats: COMPLETE Column stats: COMPLETE
           File Output Operator
             compressed: false
diff --git a/ql/src/test/results/clientpositive/vector_decimal_partition.q.out b/ql/src/test/results/clientpositive/vector_decimal_partition.q.out
index bc85edfb7f..cca4cb997a 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_partition.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_partition.q.out
@@ -70,6 +70,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [2]
+            selectExpressions: col 2:decimal(4,0)/DECIMAL_64
           Statistics: Num rows: 2 Data size: 624 Basic stats: COMPLETE Column stats: COMPLETE
           Group By Operator
             aggregations: count()
diff --git a/ql/src/test/results/clientpositive/vector_decimal_round.q.out b/ql/src/test/results/clientpositive/vector_decimal_round.q.out
index 16dc32a116..97b4a9f683 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_round.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_round.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 3]
-            selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
           Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(10,0))
@@ -161,7 +161,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 3]
-            selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
           Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col2 (type: decimal(11,0))
@@ -505,7 +505,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 3]
-            selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
           Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: decimal(10,0))
@@ -601,7 +601,7 @@
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 3]
-            selectExpressions: FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
+            selectExpressions: col 0:decimal(10,0)/DECIMAL_64, FuncRoundWithNumDigitsDecimalToDecimal(col 2:decimal(10,0), decimalPlaces -1)(children: ConvertDecimal64ToDecimal(col 0:decimal(10,0)/DECIMAL_64) -> 2:decimal(10,0)) -> 3:decimal(11,0)
           Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col2 (type: decimal(11,0))
diff --git a/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out b/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
index 73247f19be..f26d7115bb 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
@@ -100,6 +100,7 @@ STAGE PLANS:
             className: VectorSelectOperator
             native: true
             projectedOutputColumnNums: [0, 1, 2]
+            selectExpressions: col 1:decimal(10,4)/DECIMAL_64, col 2:decimal(15,8)/DECIMAL_64
           Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE
           Reduce Output Operator
             key expressions: _col0 (type: int)
diff --git a/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out b/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out
index 6817861660..9546249483 100644
--- a/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_mapjoin3.q.out
@@ -331,7 +331,7 @@ STAGE PLANS:
             includeColumns: [0]
             dataColumns: _col0:int
             partitionColumnCount: 0
-            scratchColumnTypeNames: [decimal(8,1)]
+            scratchColumnTypeNames: [decimal(8,1)/DECIMAL_64]
         Local Work:
           Map Reduce Local Work
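The scratchColumnTypeNames changes in these plans record that the scratch columns are now allocated in DECIMAL_64 form as well. A rough illustration of the difference, using the two column-vector classes from hive-storage-api (sizes and values below are made up):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

    // decimal(8,1)/DECIMAL_64: one long per row, scaled by 10^scale.
    Decimal64ColumnVector d64 = new Decimal64ColumnVector(1024, 8, 1);
    d64.vector[0] = 1234L;                           // represents 123.4
    // plain decimal(8,1): one HiveDecimalWritable per row.
    DecimalColumnVector dec = new DecimalColumnVector(1024, 8, 1);
    dec.vector[0].set(HiveDecimal.create("123.4"));  // same logical value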
@@ -514,7 +514,7 @@ STAGE PLANS:
             includeColumns: [0]
             dataColumns: _col0:int
             partitionColumnCount: 0
-            scratchColumnTypeNames: [decimal(8,1)]
+            scratchColumnTypeNames: [decimal(8,1)/DECIMAL_64]
         Local Work:
           Map Reduce Local Work
@@ -697,7 +697,7 @@ STAGE PLANS:
             includeColumns: [0]
             dataColumns: _col0:int
             partitionColumnCount: 0
-            scratchColumnTypeNames: [decimal(8,1)]
+            scratchColumnTypeNames: [decimal(8,1)/DECIMAL_64]
         Local Work:
           Map Reduce Local Work
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
index 599dbc13a4..fed675fff5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
@@ -25,6 +25,7 @@
 import java.util.List;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -129,9 +130,9 @@ public static BinarySortableDeserializeRead ascendingNullsFirst(TypeInfo[] typeI
     int tag;
   }
 
-  public BinarySortableDeserializeRead(TypeInfo[] typeInfos, boolean useExternalBuffer,
-      boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker) {
-    super(typeInfos, useExternalBuffer);
+  public BinarySortableDeserializeRead(TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
+      boolean useExternalBuffer, boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker) {
+    super(typeInfos, dataTypePhysicalVariations, useExternalBuffer);
     final int count = typeInfos.length;
 
     root = new Field();
@@ -146,6 +147,11 @@ public BinarySortableDeserializeRead(TypeInfo[] typeInfos, boolean useExternalBu
     internalBufferLen = -1;
   }
 
+  public BinarySortableDeserializeRead(TypeInfo[] typeInfos, boolean useExternalBuffer,
+      boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker) {
+    this(typeInfos, null, useExternalBuffer, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker);
+  }
+
   // Not public since we must have column information.
   private BinarySortableDeserializeRead() {
     super();
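For reference, a minimal sketch of how the widened constructor can be fed per-column variations; the types, sort orders, and marker bytes here are illustrative only, and callers built against the old signature keep working through the delegating overload above:

    import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
    import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    // Two key columns: a DECIMAL_64-capable decimal and a plain bigint.
    TypeInfo[] keyTypeInfos = {
        TypeInfoFactory.getDecimalTypeInfo(10, 2), TypeInfoFactory.longTypeInfo };
    DataTypePhysicalVariation[] keyVariations = {
        DataTypePhysicalVariation.DECIMAL_64, DataTypePhysicalVariation.NONE };
    BinarySortableDeserializeRead keyRead = new BinarySortableDeserializeRead(
        keyTypeInfos, keyVariations,
        /* useExternalBuffer */ true,
        new boolean[] { false, false },   // both key columns ascending
        new byte[] { 0, 0 },              // null markers (assumed values)
        new byte[] { 1, 1 });             // not-null markers (assumed values)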
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
index 000dfed491..1bce5fd315 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
@@ -25,6 +25,7 @@
 import java.util.Deque;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
@@ -80,6 +81,7 @@
     Category category;
     PrimitiveCategory primitiveCategory;
     TypeInfo typeInfo;
+    DataTypePhysicalVariation dataTypePhysicalVariation;
 
     int index;
     int count;
@@ -91,52 +93,59 @@
   }
 
   public LazyBinaryDeserializeRead(TypeInfo[] typeInfos, boolean useExternalBuffer) {
-    super(typeInfos, useExternalBuffer);
+    this(typeInfos, null, useExternalBuffer);
+  }
+
+  public LazyBinaryDeserializeRead(TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
+      boolean useExternalBuffer) {
+    super(typeInfos, dataTypePhysicalVariations, useExternalBuffer);
 
     tempVInt = new VInt();
     tempVLong = new VLong();
 
    currentExternalBufferNeeded = false;
 
     root = new Field();
     root.category = Category.STRUCT;
-    root.children = createFields(typeInfos);
+    root.children = createFields(typeInfos, dataTypePhysicalVariations);
     root.count = typeInfos.length;
   }
 
-  private Field[] createFields(TypeInfo[] typeInfos) {
+  private Field[] createFields(TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations) {
     final Field[] children = new Field[typeInfos.length];
     for (int i = 0; i < typeInfos.length; i++) {
-      children[i] = createField(typeInfos[i]);
+      children[i] = createField(typeInfos[i], (dataTypePhysicalVariations != null &&
+          dataTypePhysicalVariations.length > i) ? dataTypePhysicalVariations[i] : null);
     }
     return children;
   }
 
-  private Field createField(TypeInfo typeInfo) {
+  private Field createField(TypeInfo typeInfo, DataTypePhysicalVariation dataTypePhysicalVariation) {
     final Field field = new Field();
     final Category category = typeInfo.getCategory();
     field.category = category;
     field.typeInfo = typeInfo;
+    field.dataTypePhysicalVariation = dataTypePhysicalVariation;
     switch (category) {
     case PRIMITIVE:
       field.primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
       break;
     case LIST:
       field.children = new Field[1];
-      field.children[0] = createField(((ListTypeInfo) typeInfo).getListElementTypeInfo());
+      field.children[0] = createField(((ListTypeInfo) typeInfo).getListElementTypeInfo(), null);
       break;
     case MAP:
       field.children = new Field[2];
-      field.children[0] = createField(((MapTypeInfo) typeInfo).getMapKeyTypeInfo());
-      field.children[1] = createField(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
+      field.children[0] = createField(((MapTypeInfo) typeInfo).getMapKeyTypeInfo(), null);
+      field.children[1] = createField(((MapTypeInfo) typeInfo).getMapValueTypeInfo(), null);
       break;
     case STRUCT:
       final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
       final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
-      field.children = createFields(fieldTypeInfos.toArray(new TypeInfo[fieldTypeInfos.size()]));
+      field.children = createFields(fieldTypeInfos.toArray(new TypeInfo[fieldTypeInfos.size()]), null);
       break;
     case UNION:
       final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
       final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
-      field.children = createFields(objectTypeInfos.toArray(new TypeInfo[objectTypeInfos.size()]));
+      field.children = createFields(objectTypeInfos.toArray(new TypeInfo[objectTypeInfos.size()]), null);
       break;
     default:
       throw new RuntimeException();
@@ -388,6 +397,12 @@ private boolean readPrimitive(Field field) throws IOException {
       final int scale = decimalTypeInfo.getScale();
       decimalIsNull = !currentHiveDecimalWritable.mutateEnforcePrecisionScale(precision, scale);
+      if (!decimalIsNull) {
+        if (field.dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64) {
+          currentDecimal64 = currentHiveDecimalWritable.serialize64(scale);
+        }
+        return true;
+      }
     }
     if (decimalIsNull) {
       return false;
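The new branch in readPrimitive only populates currentDecimal64 when the field was declared DECIMAL_64; the long form is the unscaled value at the column's scale. A small round-trip sketch of that representation (values illustrative; both methods are on HiveDecimalWritable in hive-storage-api):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    HiveDecimalWritable w = new HiveDecimalWritable(HiveDecimal.create("12.34"));
    long dec64 = w.serialize64(2);     // 1234: the unscaled value at scale 2
    HiveDecimalWritable back = new HiveDecimalWritable();
    back.deserialize64(dec64, 2);      // back now holds 12.34 again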