diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 2918a6852c..890b565ddd 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -260,6 +260,8 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   vector_date_1.q,\
   vector_decimal64_div_decimal64scalar.q,\
   vector_decimal64_div_decimal64column.q,\
+  vector_decimal64_mul_decimal64scalar.q,\
+  vector_decimal64_mul_decimal64column.q,\
   vector_decimal_1.q,\
   vector_decimal_10_0.q,\
   vector_decimal_2.q,\
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Column.txt b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Column.txt
new file mode 100644
index 0000000000..9bfda5bdf8
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Column.txt
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.Decimal64Util;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Generated from template Decimal64ColumnMultiplyDecimal64Column.txt, which covers
+ * decimal64 multiplication expressions between two columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private final int colNum1;
+  private final int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+    super();
+
+    // Dummy final assignments.
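+    // (The no-argument constructor is used when the expression is recreated
+    // during plan deserialization; the real column indices are supplied by the
+    // parameterized constructor above.)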
+    colNum1 = -1;
+    colNum2 = -1;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+
+    // return immediately if batch is empty
+    final int n = batch.size;
+    if (n == 0) {
+      return;
+    }
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    Decimal64ColumnVector inputColVector1 = (Decimal64ColumnVector) batch.cols[colNum1];
+    Decimal64ColumnVector inputColVector2 = (Decimal64ColumnVector) batch.cols[colNum2];
+    Decimal64ColumnVector outputColVector = (Decimal64ColumnVector) batch.cols[outputColumnNum];
+    int[] sel = batch.selected;
+
+    long[] vector1 = inputColVector1.vector;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+    boolean[] outputIsNull = outputColVector.isNull;
+
+    final long outputDecimal64AbsMax =
+        HiveDecimalWritable.getDecimal64AbsMax(outputColVector.precision);
+
+    /*
+     * Propagate null values for a two-input operator and set isRepeating and noNulls appropriately.
+     */
+    NullUtil.propagateNullsColCol(
+        inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /*
+     * Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      final long result = vector1[0] * vector2[0];
+      outputVector[0] = result;
+      if (Math.abs(result) > outputDecimal64AbsMax) {
+        outputColVector.noNulls = false;
+        outputIsNull[0] = true;
+      }
+    } else if (inputColVector1.isRepeating) {
+      final long repeatedValue1 = vector1[0];
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          final long result = repeatedValue1 * vector2[i];
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          final long result = repeatedValue1 * vector2[i];
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      final long repeatedValue2 = vector2[0];
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          final long result = vector1[i] * repeatedValue2;
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          final long result = vector1[i] * repeatedValue2;
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          final long result = vector1[i] * vector2[i];
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          final long result = vector1[i] * vector2[i];
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      }
+    }
+
+    // Currently, we defer division, etc to regular HiveDecimal so we don't do any null
+    // default value setting here.
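+    // (Products that overflowed the decimal64 range of the output precision were
+    // already marked NULL in the loops above.)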
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return getColumnParamString(0, colNum1) + ", " + getColumnParamString(1, colNum2);
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64,
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Scalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Scalar.txt
new file mode 100644
index 0000000000..d5c1457069
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ColumnMultiplyDecimal64Scalar.txt
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.Decimal64Util;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Generated from template Decimal64ColumnMultiplyDecimal64Scalar.txt, which covers decimal64
+ * multiplication expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private final int colNum;
+  private final long value;
+
+  public <ClassName>(int colNum, long value, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+    super();
+
+    // Dummy final assignments.
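+    // ('value' carries the scalar operand in scaled decimal64 long form,
+    // e.g. 220 for the decimal 2.20 at scale 2.)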
+    colNum = -1;
+    value = 0;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+
+    // return immediately if batch is empty
+    final int n = batch.size;
+    if (n == 0) {
+      return;
+    }
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    Decimal64ColumnVector inputColVector = (Decimal64ColumnVector) batch.cols[colNum];
+    Decimal64ColumnVector outputColVector = (Decimal64ColumnVector) batch.cols[outputColumnNum];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+
+    // We do not need to do a column reset since we are carefully changing the output.
+    outputColVector.isRepeating = false;
+
+    long[] vector = inputColVector.vector;
+    long[] outputVector = outputColVector.vector;
+
+    final long outputDecimal64AbsMax =
+        HiveDecimalWritable.getDecimal64AbsMax(outputColVector.precision);
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) inputTypeInfos[0];
+    HiveDecimalWritable writable = new HiveDecimalWritable();
+    writable.deserialize64(value, decimalTypeInfo.scale());
+    long newValue = writable.longValue();
+
+    if (inputColVector.isRepeating) {
+      if (inputColVector.noNulls || !inputIsNull[0]) {
+        outputIsNull[0] = false;
+        // The following may override a "false" null setting if an error or overflow occurs.
+        final long result = vector[0] * newValue;
+        if (Math.abs(result) > outputDecimal64AbsMax) {
+          outputIsNull[0] = true;
+          outputColVector.noNulls = false;
+        } else {
+          outputVector[0] = result;
+        }
+      } else {
+        outputIsNull[0] = true;
+        outputColVector.noNulls = false;
+      }
+      outputColVector.isRepeating = true;
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+
+        // CONSIDER: For large n, fill n or all of isNull array and use the tighter ELSE loop.
+
+        if (!outputColVector.noNulls) {
+          for(int j = 0; j != n; j++) {
+            final int i = sel[j];
+            outputIsNull[i] = false;
+            final long result = vector[i] * newValue;
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputColVector.noNulls = false;
+              outputIsNull[i] = true;
+            }
+          }
+        } else {
+          for(int j = 0; j != n; j++) {
+            final int i = sel[j];
+            final long result = vector[i] * newValue;
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputColVector.noNulls = false;
+              outputIsNull[i] = true;
+            }
+          }
+        }
+      } else {
+        if (!outputColVector.noNulls) {
+
+          // Assume it is almost always a performance win to fill all of isNull so we can
+          // safely reset noNulls.
+          Arrays.fill(outputIsNull, false);
+          outputColVector.noNulls = true;
+        }
+        for(int i = 0; i != n; i++) {
+          final long result = vector[i] * newValue;
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputColVector.noNulls = false;
+            outputIsNull[i] = true;
+          }
+        }
+      }
+    } else /* there are NULLs in the inputColVector */ {
+
+      /*
+       * Do careful maintenance of the outputColVector.noNulls flag.
+       */
+
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!inputIsNull[i]) {
+            outputIsNull[i] = false;
+            // The following may override a "false" null setting if an error or overflow occurs.
+            final long result = vector[i] * newValue;
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          } else {
+            outputIsNull[i] = true;
+            outputColVector.noNulls = false;
+          }
+        }
+      } else {
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!inputIsNull[i]) {
+            outputIsNull[i] = false;
+            // The following may override a "false" null setting if an error or overflow occurs.
+            final long result = vector[i] * newValue;
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          } else {
+            outputIsNull[i] = true;
+            outputColVector.noNulls = false;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) inputTypeInfos[0];
+    HiveDecimalWritable writable = new HiveDecimalWritable();
+    writable.deserialize64(value, decimalTypeInfo.scale());
+    return getColumnParamString(0, colNum) + ", decimal64Val " + value +
+        ", decimalVal " + writable.toString();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64,
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ScalarMultiplyDecimal64Column.txt b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ScalarMultiplyDecimal64Column.txt
new file mode 100644
index 0000000000..298171bed2
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/Decimal64ScalarMultiplyDecimal64Column.txt
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.Decimal64Util;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Generated from template Decimal64ScalarMultiplyDecimal64Column.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private final int colNum;
+  private final long value;
+
+  public <ClassName>(long value, int colNum, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+    super();
+
+    // Dummy final assignments.
+    colNum = -1;
+    value = 0;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+
+    // return immediately if batch is empty
+    final int n = batch.size;
+    if (n == 0) {
+      return;
+    }
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    Decimal64ColumnVector inputColVector = (Decimal64ColumnVector) batch.cols[colNum];
+    Decimal64ColumnVector outputColVector = (Decimal64ColumnVector) batch.cols[outputColumnNum];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+
+    // We do not need to do a column reset since we are carefully changing the output.
+    outputColVector.isRepeating = false;
+
+    long[] vector = inputColVector.vector;
+    long[] outputVector = outputColVector.vector;
+
+    final long outputDecimal64AbsMax =
+        HiveDecimalWritable.getDecimal64AbsMax(outputColVector.precision);
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) inputTypeInfos[1];
+    HiveDecimalWritable writable = new HiveDecimalWritable();
+    writable.deserialize64(value, decimalTypeInfo.scale());
+    long newValue = writable.longValue();
+
+    if (inputColVector.isRepeating) {
+      if (inputColVector.noNulls || !inputIsNull[0]) {
+        outputIsNull[0] = false;
+        // The following may override a "false" null setting if an error or overflow occurs.
+        final long result = newValue * vector[0];
+        if (Math.abs(result) > outputDecimal64AbsMax) {
+          outputIsNull[0] = true;
+          outputColVector.noNulls = false;
+        } else {
+          outputVector[0] = result;
+        }
+      } else {
+        outputIsNull[0] = true;
+        outputColVector.noNulls = false;
+      }
+      outputColVector.isRepeating = true;
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+
+        // CONSIDER: For large n, fill n or all of isNull array and use the tighter ELSE loop.
+
+        if (!outputColVector.noNulls) {
+          for(int j = 0; j != n; j++) {
+            final int i = sel[j];
+            outputIsNull[i] = false;
+            final long result = newValue * vector[i];
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          }
+        } else {
+          for(int j = 0; j != n; j++) {
+            final int i = sel[j];
+            final long result = newValue * vector[i];
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          }
+        }
+      } else {
+        if (!outputColVector.noNulls) {
+
+          // Assume it is almost always a performance win to fill all of isNull so we can
+          // safely reset noNulls.
+          Arrays.fill(outputIsNull, false);
+          outputColVector.noNulls = true;
+        }
+        for(int i = 0; i != n; i++) {
+          final long result = newValue * vector[i];
+          outputVector[i] = result;
+          if (Math.abs(result) > outputDecimal64AbsMax) {
+            outputIsNull[i] = true;
+            outputColVector.noNulls = false;
+          }
+        }
+      }
+    } else /* there are NULLs in the inputColVector */ {
+
+      /*
+       * Do careful maintenance of the outputColVector.noNulls flag.
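+       * Once noNulls is set to false for this batch it must stay false: a single
+       * NULL input row or overflowed result makes the output column nullable.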
+       */
+
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!inputIsNull[i]) {
+            outputIsNull[i] = false;
+            // The following may override a "false" null setting if an error or overflow occurs.
+            final long result = newValue * vector[i];
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          } else {
+            outputIsNull[i] = true;
+            outputColVector.noNulls = false;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          if (!inputIsNull[i]) {
+            outputIsNull[i] = false;
+            // The following may override a "false" null setting if an error or overflow occurs.
+            final long result = newValue * vector[i];
+            outputVector[i] = result;
+            if (Math.abs(result) > outputDecimal64AbsMax) {
+              outputIsNull[i] = true;
+              outputColVector.noNulls = false;
+            }
+          } else {
+            outputIsNull[i] = true;
+            outputColVector.noNulls = false;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) inputTypeInfos[1];
+    HiveDecimalWritable writable = new HiveDecimalWritable();
+    writable.deserialize64(value, decimalTypeInfo.scale());
+    return "decimal64Val " + value + ", decimalVal " + writable.toString() +
+        ", " + getColumnParamString(1, colNum);
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64,
+            VectorExpressionDescriptor.ArgumentType.DECIMAL_64)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index ce6adc4a6b..d557fe150a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -1788,7 +1788,8 @@ private VectorExpression getDecimal64VectorExpressionForUdf(GenericUDF genericUd
       if((leftType.precision() + returnDecimalTypeInfo.getScale()) > 18) {
         return null;
       }
-    } else if (returnDecimalTypeInfo.getScale() != decimal64ColumnScale) {
+    } else if (returnDecimalTypeInfo.getScale() != decimal64ColumnScale
+        && !(genericUdf instanceof GenericUDFOPMultiply)) {
       return null;
     }
     returnDataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
index 0c0ce68311..dfee4115b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarDivideLongColumn;
@@ -50,6 +51,7 @@
DecimalColDivideDecimalColumn.class, DecimalColDivideDecimalScalar.class, DecimalScalarDivideDecimalColumn.class, Decimal64ColDivideDecimal64Scalar.class, Decimal64ColDivideDecimal64Column.class}) +@VectorizedExpressionsSupportDecimal64() public class GenericUDFOPDivide extends GenericUDFBaseNumeric { public GenericUDFOPDivide() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java index 616641d2f0..e42ac497ca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -46,7 +47,9 @@ DoubleScalarMultiplyLongColumn.class, DoubleScalarMultiplyDoubleColumn.class, DoubleScalarMultiplyLongColumnChecked.class, DoubleScalarMultiplyDoubleColumnChecked.class, DecimalColMultiplyDecimalColumn.class, DecimalColMultiplyDecimalScalar.class, - DecimalScalarMultiplyDecimalColumn.class}) + DecimalScalarMultiplyDecimalColumn.class, Decimal64ColMultiplyDecimal64Scalar.class, + Decimal64ColMultiplyDecimal64Column.class, Decimal64ScalarMultiplyDecimal64Column.class}) +@VectorizedExpressionsSupportDecimal64() public class GenericUDFOPMultiply extends GenericUDFBaseNumeric { public GenericUDFOPMultiply() { diff --git a/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64column.q b/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64column.q new file mode 100644 index 0000000000..d660d39db5 --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64column.q @@ -0,0 +1,6 @@ +create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column; +create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC; +insert into table vector_decimal64_mul_decimal64column_tmp select * from vector_decimal64_mul_decimal64column; +explain vectorization detail select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp; +select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp; diff --git a/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64scalar.q b/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64scalar.q new file mode 100644 index 0000000000..9c88f584c9 --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_decimal64_mul_decimal64scalar.q @@ -0,0 +1,6 @@ +create external table vector_decimal64_mul_decimal64scalar(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), 
ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64scalar; +create table vector_decimal64_mul_decimal64scalar_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC; +insert into table vector_decimal64_mul_decimal64scalar_tmp select * from vector_decimal64_mul_decimal64scalar; +explain vectorization detail select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp; +select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp; diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64column.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64column.q.out new file mode 100644 index 0000000000..ae4ac0288f --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64column.q.out @@ -0,0 +1,170 @@ +PREHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64column +POSTHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vector_decimal64_mul_decimal64column +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column +PREHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp +POSTHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp +PREHOOK: query: insert into table vector_decimal64_mul_decimal64column_tmp select * from vector_decimal64_mul_decimal64column +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64column +PREHOOK: Output: 
default@vector_decimal64_mul_decimal64column_tmp +POSTHOOK: query: insert into table vector_decimal64_mul_decimal64column_tmp select * from vector_decimal64_mul_decimal64column +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64column +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp +POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_discount_amt SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_discount_amt, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_list_price SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_list_price, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_sales_price SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_sales_price, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_wholesale_cost SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_wholesale_cost, type:decimal(7,2), comment:null), ] +PREHOOK: query: explain vectorization detail select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vector_decimal64_mul_decimal64column_tmp + Statistics: Num rows: 1000 Data size: 224000 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, 2:ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>] + Select Operator + expressions: (ss_ext_list_price * ss_ext_discount_amt) (type: decimal(15,4)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [5] + selectExpressions: Decimal64ColMultiplyDecimal64Column(col 0:decimal(7,2)/DECIMAL_64, col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(15,4)/DECIMAL_64 + Statistics: Num rows: 1000 Data size: 224000 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal64ToDecimal(col 5:decimal(15,4)/DECIMAL_64) -> decimal(25,4) + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE
Column stats: COMPLETE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:decimal(25,4) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: decimal(25,4)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 2] + dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(15,4)/DECIMAL_64] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:decimal(25,4) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 0:decimal(25,4)) -> decimal(25,4) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp +#### A masked pattern was here #### +POSTHOOK: query: select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp +#### A masked pattern was here #### +984383500.0000 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64scalar.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64scalar.q.out new file mode 100644 index 0000000000..9058617a44 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_mul_decimal64scalar.q.out @@ -0,0 +1,170 
@@ +PREHOOK: query: create external table vector_decimal64_mul_decimal64scalar(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar +POSTHOOK: query: create external table vector_decimal64_mul_decimal64scalar(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64scalar +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64scalar +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar +PREHOOK: query: create table vector_decimal64_mul_decimal64scalar_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp +POSTHOOK: query: create table vector_decimal64_mul_decimal64scalar_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp +PREHOOK: query: insert into table vector_decimal64_mul_decimal64scalar_tmp select * from vector_decimal64_mul_decimal64scalar +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar +PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp +POSTHOOK: query: insert into table vector_decimal64_mul_decimal64scalar_tmp select * from vector_decimal64_mul_decimal64scalar +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar +POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp +POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_discount_amt SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_discount_amt, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_list_price SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_list_price, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_sales_price SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_sales_price, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_wholesale_cost SIMPLE 
[(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_wholesale_cost, type:decimal(7,2), comment:null), ] +PREHOOK: query: explain vectorization detail select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vector_decimal64_mul_decimal64scalar_tmp + Statistics: Num rows: 1000 Data size: 448000 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, 2:ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>] + Select Operator + expressions: ((((ss_ext_list_price - ss_ext_wholesale_cost) - ss_ext_discount_amt) + ss_ext_sales_price) * 2.2) (type: decimal(13,3)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [8] + selectExpressions: Decimal64ColMultiplyDecimal64Scalar(col 7:decimal(10,2)/DECIMAL_64, decimal64Val 220, decimalVal 2.2)(children: Decimal64ColAddDecimal64Column(col 6:decimal(9,2)/DECIMAL_64, col 3:decimal(7,2)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Column(col 5:decimal(8,2)/DECIMAL_64, col 2:decimal(7,2)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Column(col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64) -> 5:decimal(8,2)/DECIMAL_64) -> 6:decimal(9,2)/DECIMAL_64) -> 7:decimal(10,2)/DECIMAL_64) -> 8:decimal(13,3)/DECIMAL_64 + Statistics: Num rows: 1000 Data size: 448000 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal64ToDecimal(col 8:decimal(13,3)/DECIMAL_64) -> decimal(23,3) + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:decimal(23,3) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type:
decimal(23,3)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2, 3] + dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(8,2)/DECIMAL_64, decimal(9,2)/DECIMAL_64, decimal(10,2)/DECIMAL_64, decimal(13,3)/DECIMAL_64] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:decimal(23,3) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 0:decimal(23,3)) -> decimal(23,3) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp +#### A masked pattern was here #### +POSTHOOK: query: select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp +#### A masked pattern was here #### +20020000.000 diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out index a92fb6e6bb..a08f037577 100644 --- a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out @@ -257,15 +257,15 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [7, 11, 14, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37] - selectExpressions: DecimalColAddDecimalColumn(col 5:decimal(10,3), col 6:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 5:decimal(10,3), 
ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)) -> 7:decimal(11,3), DecimalColSubtractDecimalColumn(col 8:decimal(10,3), col 10:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 8:decimal(10,3), DecimalScalarMultiplyDecimalColumn(val 2, col 9:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 9:decimal(7,2)) -> 10:decimal(9,2)) -> 11:decimal(11,3), DecimalColDivideDecimalColumn(col 38:decimal(11,3), col 13:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 12:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 12:decimal(11,3)/DECIMAL_64) -> 38:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 13:decimal(7,2)) -> 14:decimal(21,11), DecimalColMultiplyDecimalColumn(col 15:decimal(10,3), col 39:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 15:decimal(10,3), ConvertDecimal64ToDecimal(col 16:decimal(12,6)/DECIMAL_64)(children: Decimal64ColDivideDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, decimal64Val 340, decimalVal 3.4) -> 16:decimal(12,6)/DECIMAL_64) -> 39:decimal(12,6)) -> 17:decimal(23,9), DecimalColModuloDecimalScalar(col 18:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 18:decimal(10,3)) -> 19:decimal(5,3), CastDecimalToLong(col 20:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 20:decimal(10,3)) -> 21:int, CastDecimalToLong(col 22:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 22:decimal(7,2)) -> 23:smallint, CastDecimalToLong(col 24:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 24:decimal(7,2)) -> 25:tinyint, CastDecimalToLong(col 26:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 26:decimal(10,3)) -> 27:bigint, CastDecimalToBoolean(col 28:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 28:decimal(10,3)) -> 29:boolean, CastDecimalToDouble(col 30:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 30:decimal(7,2)) -> 31:double, CastDecimalToFloat(col 32:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 32:decimal(10,3)) -> 33:float, CastDecimalToString(col 34:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 34:decimal(7,2)) -> 35:string, CastDecimalToTimestamp(col 36:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 36:decimal(10,3)) -> 37:timestamp + projectedOutputColumnNums: [7, 10, 13, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36] + selectExpressions: DecimalColAddDecimalColumn(col 5:decimal(10,3), col 6:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 5:decimal(10,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)) -> 7:decimal(11,3), DecimalColSubtractDecimalColumn(col 8:decimal(10,3), col 37:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 8:decimal(10,3), ConvertDecimal64ToDecimal(col 9:decimal(9,2)/DECIMAL_64)(children: Decimal64ScalarMultiplyDecimal64Column(decimal64Val 200, decimalVal 2, col 2:decimal(7,2)/DECIMAL_64) -> 9:decimal(9,2)/DECIMAL_64) -> 37:decimal(9,2)) -> 10:decimal(11,3), DecimalColDivideDecimalColumn(col 38:decimal(11,3), col 
12:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 11:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 11:decimal(11,3)/DECIMAL_64) -> 38:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 12:decimal(7,2)) -> 13:decimal(21,11), DecimalColMultiplyDecimalColumn(col 14:decimal(10,3), col 39:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 14:decimal(10,3), ConvertDecimal64ToDecimal(col 15:decimal(12,6)/DECIMAL_64)(children: Decimal64ColDivideDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, decimal64Val 340, decimalVal 3.4) -> 15:decimal(12,6)/DECIMAL_64) -> 39:decimal(12,6)) -> 16:decimal(23,9), DecimalColModuloDecimalScalar(col 17:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 17:decimal(10,3)) -> 18:decimal(5,3), CastDecimalToLong(col 19:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 19:decimal(10,3)) -> 20:int, CastDecimalToLong(col 21:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 21:decimal(7,2)) -> 22:smallint, CastDecimalToLong(col 23:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 23:decimal(7,2)) -> 24:tinyint, CastDecimalToLong(col 25:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 25:decimal(10,3)) -> 26:bigint, CastDecimalToBoolean(col 27:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 27:decimal(10,3)) -> 28:boolean, CastDecimalToDouble(col 29:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 29:decimal(7,2)) -> 30:double, CastDecimalToFloat(col 31:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 31:decimal(10,3)) -> 32:float, CastDecimalToString(col 33:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 33:decimal(7,2)) -> 34:string, CastDecimalToTimestamp(col 35:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 35:decimal(10,3)) -> 36:timestamp Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(11,3)), _col1 (type: decimal(11,3)), _col2 (type: decimal(21,11)), _col3 (type: decimal(23,9)), _col4 (type: decimal(5,3)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp) sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumns: 7:decimal(11,3), 11:decimal(11,3), 14:decimal(21,11), 17:decimal(23,9), 19:decimal(5,3), 21:int, 23:smallint, 25:tinyint, 27:bigint, 29:boolean, 31:double, 33:float, 35:string, 37:timestamp + keyColumns: 7:decimal(11,3), 10:decimal(11,3), 13:decimal(21,11), 16:decimal(23,9), 18:decimal(5,3), 20:int, 22:smallint, 24:tinyint, 26:bigint, 28:boolean, 30:double, 32:float, 34:string, 36:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE @@ -286,7 +286,7 
@@ STAGE PLANS: includeColumns: [0, 1, 2] dataColumns: cdouble:double, cdecimal1:decimal(10,3)/DECIMAL_64, cdecimal2:decimal(7,2)/DECIMAL_64 partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(10,3), decimal(10,3), decimal(7,2), decimal(11,3), decimal(10,3), decimal(7,2), decimal(9,2), decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(7,2), decimal(21,11), decimal(10,3), decimal(12,6)/DECIMAL_64, decimal(23,9), decimal(10,3), decimal(5,3), decimal(10,3), bigint, decimal(7,2), bigint, decimal(7,2), bigint, decimal(10,3), bigint, decimal(10,3), bigint, decimal(7,2), double, decimal(10,3), double, decimal(7,2), string, decimal(10,3), timestamp, decimal(11,3), decimal(12,6)] + scratchColumnTypeNames: [decimal(10,3), decimal(10,3), decimal(7,2), decimal(11,3), decimal(10,3), decimal(9,2)/DECIMAL_64, decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(7,2), decimal(21,11), decimal(10,3), decimal(12,6)/DECIMAL_64, decimal(23,9), decimal(10,3), decimal(5,3), decimal(10,3), bigint, decimal(7,2), bigint, decimal(7,2), bigint, decimal(10,3), bigint, decimal(10,3), bigint, decimal(7,2), double, decimal(10,3), double, decimal(7,2), string, decimal(10,3), timestamp, decimal(9,2), decimal(11,3), decimal(12,6)] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git a/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out new file mode 100644 index 0000000000..a4cbd23d1b --- /dev/null +++ b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out @@ -0,0 +1,142 @@ +PREHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64column +POSTHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vector_decimal64_mul_decimal64column +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vector_decimal64_mul_decimal64column +PREHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp +POSTHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) 
diff --git a/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out
new file mode 100644
index 0000000000..a4cbd23d1b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64column.q.out
@@ -0,0 +1,142 @@
+PREHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vector_decimal64_mul_decimal64column
+POSTHOOK: query: create external table vector_decimal64_mul_decimal64column(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64column
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@vector_decimal64_mul_decimal64column
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64column
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64column
+PREHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp
+POSTHOOK: query: create table vector_decimal64_mul_decimal64column_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp
+PREHOOK: query: insert into table vector_decimal64_mul_decimal64column_tmp select * from vector_decimal64_mul_decimal64column
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64column
+PREHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp
+POSTHOOK: query: insert into table vector_decimal64_mul_decimal64column_tmp select * from vector_decimal64_mul_decimal64column
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64column
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64column_tmp
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_discount_amt SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_discount_amt, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_list_price SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_list_price, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_sales_price SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_sales_price, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64column_tmp.ss_ext_wholesale_cost SIMPLE [(vector_decimal64_mul_decimal64column)vector_decimal64_mul_decimal64column.FieldSchema(name:ss_ext_wholesale_cost, type:decimal(7,2), comment:null), ]
+PREHOOK: query: explain vectorization detail select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: vector_decimal64_mul_decimal64column_tmp
+            Statistics: Num rows: 1000 Data size: 224000 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, 2:ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct]
+            Select Operator
+              expressions: (ss_ext_list_price * ss_ext_discount_amt) (type: decimal(15,4))
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [5]
+                  selectExpressions: Decimal64ColMultiplyDecimal64Column(col 0:decimal(7,2)/DECIMAL_64, col 2:decimal(7,2)/DECIMAL_64) -> 5:decimal(15,4)/DECIMAL_64
+              Statistics: Num rows: 1000 Data size: 224000 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: sum(_col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumDecimal64ToDecimal(col 5:decimal(15,4)/DECIMAL_64) -> decimal(25,4)
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    native: false
+                    vectorProcessingMode: HASH
+                    projectedOutputColumnNums: [0]
+                minReductionHashAggr: 0.99
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkOperator
+                      native: false
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                  Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: decimal(25,4))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 4
+              includeColumns: [0, 2]
+              dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, ss_ext_sales_price:decimal(7,2)/DECIMAL_64
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(15,4)/DECIMAL_64]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(ss_ext_list_price*ss_ext_discount_amt) from vector_decimal64_mul_decimal64column_tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64column_tmp
+#### A masked pattern was here ####
+984383500.0000
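
The column-by-column case above is the simplest of the new kernels: decimal(7,2) * decimal(7,2) widens to decimal(15,4), which still fits in 18 digits of precision, so the product stays /DECIMAL_64 all the way into VectorUDAFSumDecimal64ToDecimal with no ConvertDecimal64ToDecimal step. A per-row sketch of why (simplified from the generated inner loop; the values are made up, not rows from decimal64table.csv):

// Multiplying two unscaled longs at scales s1 and s2 directly yields the
// unscaled long of the product at scale s1 + s2; overflow past the output
// precision is what the template's outputDecimal64AbsMax check catches.
long listPrice = 12550L;                 // hypothetical 125.50, decimal(7,2)
long discountAmt = 340L;                 // hypothetical 3.40, decimal(7,2)
long product = listPrice * discountAmt;  // 4267000 -> 426.7000, decimal(15,4)
System.out.println(java.math.BigDecimal.valueOf(product, 4)); // prints 426.7000
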
diff --git a/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64scalar.q.out b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64scalar.q.out
new file mode 100644
index 0000000000..b7cbb63b6d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_decimal64_mul_decimal64scalar.q.out
@@ -0,0 +1,142 @@
+PREHOOK: query: create external table vector_decimal64_mul_decimal64scalar(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar
+POSTHOOK: query: create external table vector_decimal64_mul_decimal64scalar(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64scalar
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table.csv' OVERWRITE INTO TABLE vector_decimal64_mul_decimal64scalar
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar
+PREHOOK: query: create table vector_decimal64_mul_decimal64scalar_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp
+POSTHOOK: query: create table vector_decimal64_mul_decimal64scalar_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(7,2), ss_ext_discount_amt decimal(7,2), ss_ext_sales_price decimal(7,2)) stored as ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp
+PREHOOK: query: insert into table vector_decimal64_mul_decimal64scalar_tmp select * from vector_decimal64_mul_decimal64scalar
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar
+PREHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp
+POSTHOOK: query: insert into table vector_decimal64_mul_decimal64scalar_tmp select * from vector_decimal64_mul_decimal64scalar
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar
+POSTHOOK: Output: default@vector_decimal64_mul_decimal64scalar_tmp
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_discount_amt SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_discount_amt, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_list_price SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_list_price, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_sales_price SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_sales_price, type:decimal(7,2), comment:null), ]
+POSTHOOK: Lineage: vector_decimal64_mul_decimal64scalar_tmp.ss_ext_wholesale_cost SIMPLE [(vector_decimal64_mul_decimal64scalar)vector_decimal64_mul_decimal64scalar.FieldSchema(name:ss_ext_wholesale_cost, type:decimal(7,2), comment:null), ]
+PREHOOK: query: explain vectorization detail select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: vector_decimal64_mul_decimal64scalar_tmp
+            Statistics: Num rows: 1000 Data size: 448000 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, 2:ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct]
+            Select Operator
+              expressions: ((((ss_ext_list_price - ss_ext_wholesale_cost) - ss_ext_discount_amt) + ss_ext_sales_price) * 2.2) (type: decimal(13,3))
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [8]
+                  selectExpressions: Decimal64ColMultiplyDecimal64Scalar(col 7:decimal(10,2)/DECIMAL_64, decimal64Val 220, decimalVal 2.2)(children: Decimal64ColAddDecimal64Column(col 6:decimal(9,2)/DECIMAL_64, col 3:decimal(7,2)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Column(col 5:decimal(8,2)/DECIMAL_64, col 2:decimal(7,2)/DECIMAL_64)(children: Decimal64ColSubtractDecimal64Column(col 0:decimal(7,2)/DECIMAL_64, col 1:decimal(7,2)/DECIMAL_64) -> 5:decimal(8,2)/DECIMAL_64) -> 6:decimal(9,2)/DECIMAL_64) -> 7:decimal(10,2)/DECIMAL_64) -> 8:decimal(13,3)/DECIMAL_64
+              Statistics: Num rows: 1000 Data size: 448000 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: sum(_col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumDecimal64ToDecimal(col 8:decimal(13,3)/DECIMAL_64) -> decimal(23,3)
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    native: false
+                    vectorProcessingMode: HASH
+                    projectedOutputColumnNums: [0]
+                minReductionHashAggr: 0.99
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkOperator
+                      native: false
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                  Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: decimal(23,3))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 4
+              includeColumns: [0, 1, 2, 3]
+              dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, ss_ext_sales_price:decimal(7,2)/DECIMAL_64
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(8,2)/DECIMAL_64, decimal(9,2)/DECIMAL_64, decimal(10,2)/DECIMAL_64, decimal(13,3)/DECIMAL_64]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)*2.2) from vector_decimal64_mul_decimal64scalar_tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vector_decimal64_mul_decimal64scalar_tmp
+#### A masked pattern was here ####
+20020000.000
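
Worth noting in the scalar plan above: the whole expression tree stays in DECIMAL_64 form, since same-scale decimal64 addition and subtraction are plain long arithmetic that only widens precision, (8,2) -> (9,2) -> (10,2), before Decimal64ColMultiplyDecimal64Scalar applies the literal, which the plan carries in both encodings (decimal64Val 220, decimalVal 2.2). A scaled-long walk-through with made-up values (the exact scale adjustment for the scalar operand follows the generated template, not this sketch):

long list = 12550L, wholesale = 2500L, discount = 340L, sales = 9990L; // all scale 2
long step1 = list - wholesale;   // 10050 -> 100.50, decimal(8,2)
long step2 = step1 - discount;   //  9710 ->  97.10, decimal(9,2)
long step3 = step2 + sales;      // 19700 -> 197.00, decimal(10,2)
// Final step: 197.00 * 2.2 = 433.400, typed decimal(13,3) in the plan.
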
diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
index efc9122d58..9660f8828f 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
@@ -223,8 +223,8 @@ STAGE PLANS:
             Select Vectorization:
                 className: VectorSelectOperator
                 native: true
-                projectedOutputColumnNums: [7, 11, 14, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37]
-                selectExpressions: DecimalColAddDecimalColumn(col 5:decimal(10,3), col 6:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 5:decimal(10,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)) -> 7:decimal(11,3), DecimalColSubtractDecimalColumn(col 8:decimal(10,3), col 10:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 8:decimal(10,3), DecimalScalarMultiplyDecimalColumn(val 2, col 9:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 9:decimal(7,2)) -> 10:decimal(9,2)) -> 11:decimal(11,3), DecimalColDivideDecimalColumn(col 38:decimal(11,3), col 13:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 12:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 12:decimal(11,3)/DECIMAL_64) -> 38:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 13:decimal(7,2)) -> 14:decimal(21,11), DecimalColMultiplyDecimalColumn(col 15:decimal(10,3), col 39:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 15:decimal(10,3), ConvertDecimal64ToDecimal(col 16:decimal(12,6)/DECIMAL_64)(children: Decimal64ColDivideDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, decimal64Val 340, decimalVal 3.4) -> 16:decimal(12,6)/DECIMAL_64) -> 39:decimal(12,6)) -> 17:decimal(23,9), DecimalColModuloDecimalScalar(col 18:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 18:decimal(10,3)) -> 19:decimal(5,3), CastDecimalToLong(col 20:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 20:decimal(10,3)) -> 21:int, CastDecimalToLong(col 22:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 22:decimal(7,2)) -> 23:smallint, CastDecimalToLong(col 24:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 24:decimal(7,2)) -> 25:tinyint, CastDecimalToLong(col 26:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 26:decimal(10,3)) -> 27:bigint, CastDecimalToBoolean(col 28:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 28:decimal(10,3)) -> 29:boolean, CastDecimalToDouble(col 30:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 30:decimal(7,2)) -> 31:double, CastDecimalToFloat(col 32:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 32:decimal(10,3)) -> 33:float, CastDecimalToString(col 34:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 34:decimal(7,2)) -> 35:string, CastDecimalToTimestamp(col 36:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 36:decimal(10,3)) -> 37:timestamp
+                projectedOutputColumnNums: [7, 10, 13, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36]
+                selectExpressions: DecimalColAddDecimalColumn(col 5:decimal(10,3), col 6:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 5:decimal(10,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)) -> 7:decimal(11,3), DecimalColSubtractDecimalColumn(col 8:decimal(10,3), col 37:decimal(9,2))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 8:decimal(10,3), ConvertDecimal64ToDecimal(col 9:decimal(9,2)/DECIMAL_64)(children: Decimal64ScalarMultiplyDecimal64Column(decimal64Val 200, decimalVal 2, col 2:decimal(7,2)/DECIMAL_64) -> 9:decimal(9,2)/DECIMAL_64) -> 37:decimal(9,2)) -> 10:decimal(11,3), DecimalColDivideDecimalColumn(col 38:decimal(11,3), col 12:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 11:decimal(11,3)/DECIMAL_64)(children: Decimal64ColAddDecimal64Scalar(col 1:decimal(10,3)/DECIMAL_64, decimal64Val 2340, decimalVal 2.34) -> 11:decimal(11,3)/DECIMAL_64) -> 38:decimal(11,3), ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 12:decimal(7,2)) -> 13:decimal(21,11), DecimalColMultiplyDecimalColumn(col 14:decimal(10,3), col 39:decimal(12,6))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 14:decimal(10,3), ConvertDecimal64ToDecimal(col 15:decimal(12,6)/DECIMAL_64)(children: Decimal64ColDivideDecimal64Scalar(col 2:decimal(7,2)/DECIMAL_64, decimal64Val 340, decimalVal 3.4) -> 15:decimal(12,6)/DECIMAL_64) -> 39:decimal(12,6)) -> 16:decimal(23,9), DecimalColModuloDecimalScalar(col 17:decimal(10,3), val 10)(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 17:decimal(10,3)) -> 18:decimal(5,3), CastDecimalToLong(col 19:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 19:decimal(10,3)) -> 20:int, CastDecimalToLong(col 21:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 21:decimal(7,2)) -> 22:smallint, CastDecimalToLong(col 23:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 23:decimal(7,2)) -> 24:tinyint, CastDecimalToLong(col 25:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 25:decimal(10,3)) -> 26:bigint, CastDecimalToBoolean(col 27:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 27:decimal(10,3)) -> 28:boolean, CastDecimalToDouble(col 29:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 29:decimal(7,2)) -> 30:double, CastDecimalToFloat(col 31:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 31:decimal(10,3)) -> 32:float, CastDecimalToString(col 33:decimal(7,2))(children: ConvertDecimal64ToDecimal(col 2:decimal(7,2)/DECIMAL_64) -> 33:decimal(7,2)) -> 34:string, CastDecimalToTimestamp(col 35:decimal(10,3))(children: ConvertDecimal64ToDecimal(col 1:decimal(10,3)/DECIMAL_64) -> 35:decimal(10,3)) -> 36:timestamp
             Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Reduce Output Operator
               key expressions: _col0 (type: decimal(11,3)), _col1 (type: decimal(11,3)), _col2 (type: decimal(21,11)), _col3 (type: decimal(23,9)), _col4 (type: decimal(5,3)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp)
@@ -251,7 +251,7 @@ STAGE PLANS:
           includeColumns: [0, 1, 2]
           dataColumns: cdouble:double, cdecimal1:decimal(10,3)/DECIMAL_64, cdecimal2:decimal(7,2)/DECIMAL_64
           partitionColumnCount: 0
-          scratchColumnTypeNames: [decimal(10,3), decimal(10,3), decimal(7,2), decimal(11,3), decimal(10,3), decimal(7,2), decimal(9,2), decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(7,2), decimal(21,11), decimal(10,3), decimal(12,6)/DECIMAL_64, decimal(23,9), decimal(10,3), decimal(5,3), decimal(10,3), bigint, decimal(7,2), bigint, decimal(7,2), bigint, decimal(10,3), bigint, decimal(10,3), bigint, decimal(7,2), double, decimal(10,3), double, decimal(7,2), string, decimal(10,3), timestamp, decimal(11,3), decimal(12,6)]
+          scratchColumnTypeNames: [decimal(10,3), decimal(10,3), decimal(7,2), decimal(11,3), decimal(10,3), decimal(9,2)/DECIMAL_64, decimal(11,3), decimal(11,3)/DECIMAL_64, decimal(7,2), decimal(21,11), decimal(10,3), decimal(12,6)/DECIMAL_64, decimal(23,9), decimal(10,3), decimal(5,3), decimal(10,3), bigint, decimal(7,2), bigint, decimal(7,2), bigint, decimal(10,3), bigint, decimal(10,3), bigint, decimal(7,2), double, decimal(10,3), double, decimal(7,2), string, decimal(10,3), timestamp, decimal(9,2), decimal(11,3), decimal(12,6)]
       Reduce Vectorization:
           enabled: false
           enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
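
The vector_decimal_expressions.q.out rewrite above is where the gain shows up in an existing query: the old plan boxed cdecimal2 through ConvertDecimal64ToDecimal before DecimalScalarMultiplyDecimalColumn could run, while the new plan multiplies the raw long column with Decimal64ScalarMultiplyDecimal64Column and converts only the finished product for the enclosing subtract. A sketch of the two per-row evaluation orders (illustrative; the generated template owns the exact scale handling):

// old: ConvertDecimal64ToDecimal(col 2) -> DecimalScalarMultiplyDecimalColumn(val 2, ...)
//      (one boxed HiveDecimal per row before the multiply happens)
// new: Decimal64ScalarMultiplyDecimal64Column(decimal64Val 200, col 2)
//      -> one ConvertDecimal64ToDecimal of the product only
long cdecimal2 = 3174L;               // hypothetical 31.74 at scale 2
long rawProduct = 200L * cdecimal2;   // 634800, still long arithmetic
System.out.println(java.math.BigDecimal.valueOf(rawProduct, 4)); // 63.4800 == 2 * 31.74
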
diff --git a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index 82fc1415de..14e2fe0dad 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -319,6 +319,10 @@
     {"Decimal64ColumnDivideDecimal64Scalar", "Divide", "/"},
     {"Decimal64ColumnDivideDecimal64Column", "Divide", "/"},
+    {"Decimal64ColumnMultiplyDecimal64Scalar", "Multiply", "*"},
+    {"Decimal64ScalarMultiplyDecimal64Column", "Multiply", "*"},
+    {"Decimal64ColumnMultiplyDecimal64Column", "Multiply", "*"},
+
     {"ColumnCompareScalar", "Equal", "long", "long", "=="},
     {"ColumnCompareScalar", "Equal", "long", "double", "=="},
     {"ColumnCompareScalar", "Equal", "double", "double", "=="},
@@ -1417,6 +1421,12 @@ private void generate() throws Exception {
       generateDecimal64ScalarArithmeticDecimal64Column(tdesc);
     } else if (tdesc[0].equals("Decimal64ColumnArithmeticDecimal64Column")) {
       generateDecimal64ColumnArithmeticDecimal64Column(tdesc);
+    } else if (tdesc[0].equals("Decimal64ColumnMultiplyDecimal64Scalar")) {
+      generateDecimal64ColumnArithmeticDecimal64Scalar(tdesc);
+    } else if (tdesc[0].equals("Decimal64ScalarMultiplyDecimal64Column")) {
+      generateDecimal64ScalarArithmeticDecimal64Column(tdesc);
+    } else if (tdesc[0].equals("Decimal64ColumnMultiplyDecimal64Column")) {
+      generateDecimal64ColumnArithmeticDecimal64Column(tdesc);
     } else if (tdesc[0].equals("Decimal64ColumnDivideDecimal64Scalar")) {
       generateDecimal64ColumnArithmeticDecimal64Scalar(tdesc);
     } else if (tdesc[0].equals("Decimal64ColumnDivideDecimal64Column")) {
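
Finally, the GenVectorCode changes are pure registration: each new table entry names a template, an operator word, and an operator symbol, and the dispatch in generate() routes all three multiply variants to the same generic Decimal64 arithmetic generators that already served Divide. A hedged sketch of the expansion step those entries drive (the readTemplate helper and the <ClassName>/<OperatorName>/<OperatorSymbol> markers are assumptions for illustration, not verified GenVectorCode internals):

static String expandDecimal64ColCol(String[] tdesc) throws Exception {
    // tdesc = {"Decimal64ColumnMultiplyDecimal64Column", "Multiply", "*"}
    String operatorName = tdesc[1];                                   // "Multiply"
    String operatorSymbol = tdesc[2];                                 // "*"
    // Yields the class the plans above reference: Decimal64ColMultiplyDecimal64Column.
    String className = "Decimal64Col" + operatorName + "Decimal64Column";
    String template = readTemplate("Decimal64ColumnArithmeticDecimal64Column.txt"); // assumed helper
    return template
        .replace("<ClassName>", className)
        .replace("<OperatorName>", operatorName)
        .replace("<OperatorSymbol>", operatorSymbol);
}
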