diff --git common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java index ad48f69..0bb84e1 100644 --- common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java +++ common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java @@ -266,4 +266,17 @@ public static BigDecimal enforcePrecisionScale(BigDecimal bd, int maxPrecision, return bd; } + + public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, int maxPrecision, int maxScale) { + if (dec == null) { + return null; + } + + BigDecimal bd = enforcePrecisionScale(dec.bd, maxPrecision, maxScale); + if (bd == null) { + return null; + } + + return HiveDecimal.create(bd); + } } diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 2c84a36..3ae001d 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -161,9 +161,25 @@ minitez.query.files.shared=alter_merge_2_orc.q,\ vector_cast_constant.q,\ vector_char_4.q,\ vector_char_simple.q,\ + vector_coalesce.q,\ vector_count_distinct.q,\ vector_data_types.q,\ + vector_decimal_1.q,\ + vector_decimal_10_0.q,\ + vector_decimal_2.q,\ + vector_decimal_3.q,\ + vector_decimal_4.q,\ + vector_decimal_5.q,\ + vector_decimal_6.q,\ vector_decimal_aggregate.q,\ + vector_decimal_cast.q,\ + vector_decimal_expressions.q,\ + vector_decimal_mapjoin.q,\ + vector_decimal_math_funcs.q,\ + vector_decimal_precision.q,\ + vector_decimal_trailing.q,\ + vector_decimal_udf.q,\ + vector_decimal_udf2.q,\ vector_distinct_2.q,\ vector_elt.q,\ vector_groupby_3.q,\ @@ -196,6 +212,7 @@ minitez.query.files.shared=alter_merge_2_orc.q,\ vectorization_9.q,\ vectorization_decimal_date.q,\ vectorization_div0.q,\ + vectorization_limit.q,\ vectorization_nested_udf.q,\ vectorization_not.q,\ vectorization_part.q,\ @@ -204,7 +221,10 @@ minitez.query.files.shared=alter_merge_2_orc.q,\ vectorization_short_regress.q,\ vectorized_bucketmapjoin1.q,\ vectorized_case.q,\ + vectorized_casts.q,\ vectorized_context.q,\ + vectorized_date_funcs.q,\ + vectorized_distinct_gby.q,\ vectorized_mapjoin.q,\ vectorized_math_funcs.q,\ vectorized_nested_mapjoin.q,\ diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt index 1609428..e1df589 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; /** * Generated from template ColumnArithmeticColumnDecimal.txt, which covers binary arithmetic @@ -61,8 +61,8 @@ public class extends VectorExpression { DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumn]; int[] sel = batch.selected; int n = batch.size; - Decimal128[] vector1 = inputColVector1.vector; - Decimal128[] vector2 = inputColVector2.vector; + HiveDecimalWritable[] vector1 = inputColVector1.vector; + HiveDecimalWritable[] vector2 = inputColVector2.vector; // 
return immediately if batch is empty if (n == 0) { diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt index 15feb07..0bb1532 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt @@ -24,7 +24,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; /** * Generated from template ColumnArithmeticScalarDecimal.txt, which covers binary arithmetic @@ -35,10 +36,10 @@ public class extends VectorExpression { private static final long serialVersionUID = 1L; private int colNum; - private Decimal128 value; + private HiveDecimal value; private int outputColumn; - public (int colNum, Decimal128 value, int outputColumn) { + public (int colNum, HiveDecimal value, int outputColumn) { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; @@ -64,7 +65,7 @@ public class extends VectorExpression { outputColVector.noNulls = inputColVector.noNulls; outputColVector.isRepeating = inputColVector.isRepeating; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -129,26 +130,6 @@ public class extends VectorExpression { public int getOutputColumn() { return outputColumn; } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getValue() { - return value; - } - - public void setValue(Decimal128 value) { - this.value = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt index 418caac..623bcfb 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; /** * Generated from template ColumnArithmeticColumnDecimal.txt, which covers binary arithmetic @@ -61,8 +61,8 @@ public class extends VectorExpression { DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumn]; int[] sel = batch.selected; int n = batch.size; - Decimal128[] vector1 = inputColVector1.vector; - Decimal128[] vector2 = inputColVector2.vector; + HiveDecimalWritable[] vector1 = inputColVector1.vector; + HiveDecimalWritable[] vector2 = inputColVector2.vector; // return immediately if batch is empty 
if (n == 0) { @@ -138,26 +138,6 @@ public class extends VectorExpression { return outputColumn; } - public int getColNum1() { - return colNum1; - } - - public void setColNum1(int colNum1) { - this.colNum1 = colNum1; - } - - public int getColNum2() { - return colNum2; - } - - public void setColNum2(int colNum2) { - this.colNum2 = colNum2; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt index dbdb8f6..841ef93 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt @@ -24,7 +24,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; /** * Generated from template ColumnDivideScalarDecimal.txt, which covers binary arithmetic @@ -35,11 +36,10 @@ public class extends VectorExpression { private static final long serialVersionUID = 1L; private int colNum; - private Decimal128 value; + private HiveDecimal value; private int outputColumn; - private transient Decimal128 zero; // to hold constant 0 for later use - public (int colNum, Decimal128 value, int outputColumn) { + public (int colNum, HiveDecimal value, int outputColumn) { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; @@ -65,13 +65,8 @@ public class extends VectorExpression { outputColVector.noNulls = inputColVector.noNulls; outputColVector.isRepeating = inputColVector.isRepeating; int n = batch.size; - Decimal128[] vector = inputColVector.vector; - Decimal128[] outputVector = outputColVector.vector; - - // Initialize local variable to use as 0 value on first use. 
- if (zero == null) { - this.zero = new Decimal128(0, inputColVector.scale); - } + HiveDecimalWritable[] vector = inputColVector.vector; + HiveDecimalWritable[] outputVector = outputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -90,7 +85,7 @@ public class extends VectorExpression { } - if (value.compareTo(zero) == 0) { + if (value.compareTo(HiveDecimal.ZERO) == 0) { // Denominator is zero, convert the batch to nulls outputColVector.noNulls = false; @@ -142,26 +137,6 @@ public class extends VectorExpression { return outputColumn; } - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getValue() { - return value; - } - - public void setValue(Decimal128 value) { - this.value = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) diff --git ql/src/gen/vectorization/ExpressionTemplates/DecimalColumnUnaryFunc.txt ql/src/gen/vectorization/ExpressionTemplates/DecimalColumnUnaryFunc.txt index fba5258..619015e 100644 --- ql/src/gen/vectorization/ExpressionTemplates/DecimalColumnUnaryFunc.txt +++ ql/src/gen/vectorization/ExpressionTemplates/DecimalColumnUnaryFunc.txt @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; @@ -59,7 +59,7 @@ public class extends VectorExpression { boolean[] outputIsNull = outputColVector.isNull; outputColVector.noNulls = inputColVector.noNulls; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -117,18 +117,6 @@ public class extends VectorExpression { public String getOutputType() { return outputType; } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnBetween.txt ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnBetween.txt index 829b9ca..d68edfa 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnBetween.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnBetween.txt @@ -18,8 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -37,10 +39,10 @@ public class extends VectorExpression { private int colNum; // The comparison is of the form "column BETWEEN leftValue AND rightValue" - private Decimal128 leftValue; - private Decimal128 rightValue; + private HiveDecimal leftValue; + private HiveDecimal rightValue; - public (int colNum, Decimal128 leftValue, Decimal128 rightValue) { + public (int colNum, HiveDecimal leftValue, HiveDecimal rightValue) { this.colNum = colNum; this.leftValue = leftValue; this.rightValue = rightValue; @@ -60,7 +62,7 @@ public class extends VectorExpression { int[] sel = batch.selected; boolean[] nullPos = inputColVector.isNull; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -72,7 +74,7 @@ public class extends VectorExpression { // All must be selected otherwise size would be zero. // Repeating property will not change. - if ((vector[0].compareTo(leftValue) < 0 || vector[0].compareTo(rightValue) > 0)) { + if ((DecimalUtil.compare(vector[0], leftValue) < 0 || DecimalUtil.compare(vector[0], rightValue) > 0)) { // Entire batch is filtered out. batch.size = 0; @@ -81,7 +83,7 @@ public class extends VectorExpression { int newSize = 0; for(int j = 0; j != n; j++) { int i = sel[j]; - if ((leftValue.compareTo(vector[i]) <= 0 && vector[i].compareTo(rightValue) <= 0)) { + if ((DecimalUtil.compare(leftValue, vector[i]) <= 0 && DecimalUtil.compare(vector[i], rightValue) <= 0)) { sel[newSize++] = i; } } @@ -89,7 +91,7 @@ public class extends VectorExpression { } else { int newSize = 0; for(int i = 0; i != n; i++) { - if ((leftValue.compareTo(vector[i]) <= 0 && vector[i].compareTo(rightValue) <= 0)) { + if ((DecimalUtil.compare(leftValue, vector[i]) <= 0 && DecimalUtil.compare(vector[i], rightValue) <= 0)) { sel[newSize++] = i; } } @@ -104,7 +106,7 @@ public class extends VectorExpression { // All must be selected otherwise size would be zero. // Repeating property will not change. if (!nullPos[0]) { - if ((vector[0].compareTo(leftValue) < 0 || vector[0].compareTo(rightValue) > 0)) { + if ((DecimalUtil.compare(vector[0], leftValue) < 0 || DecimalUtil.compare(vector[0], rightValue) > 0)) { // Entire batch is filtered out. 
batch.size = 0; @@ -117,7 +119,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { int i = sel[j]; if (!nullPos[i]) { - if ((leftValue.compareTo(vector[i]) <= 0 && vector[i].compareTo(rightValue) <= 0)) { + if ((DecimalUtil.compare(leftValue, vector[i]) <= 0 && DecimalUtil.compare(vector[i], rightValue) <= 0)) { sel[newSize++] = i; } } @@ -129,7 +131,7 @@ public class extends VectorExpression { int newSize = 0; for(int i = 0; i != n; i++) { if (!nullPos[i]) { - if ((leftValue.compareTo(vector[i]) <= 0 && vector[i].compareTo(rightValue) <= 0)) { + if ((DecimalUtil.compare(leftValue, vector[i]) <= 0 && DecimalUtil.compare(vector[i], rightValue) <= 0)) { sel[newSize++] = i; } } @@ -152,30 +154,6 @@ public class extends VectorExpression { return "boolean"; } - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getLeftValue() { - return leftValue; - } - - public void setLeftValue(Decimal128 value) { - this.leftValue = value; - } - - public Decimal128 getRightValue() { - return rightValue; - } - - public void setRightValue(Decimal128 value) { - this.leftValue = value; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt index 32c50d9..353e849 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt @@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; /** * Generated from template FilterDecimalColumnCompareColumn.txt, which covers binary comparison @@ -57,8 +57,8 @@ public class extends VectorExpression { boolean[] nullPos1 = inputColVector1.isNull; boolean[] nullPos2 = inputColVector2.isNull; int n = batch.size; - Decimal128[] vector1 = inputColVector1.vector; - Decimal128[] vector2 = inputColVector2.vector; + HiveDecimalWritable[] vector1 = inputColVector1.vector; + HiveDecimalWritable[] vector2 = inputColVector2.vector; // return immediately if batch is empty if (n == 0) { @@ -428,22 +428,6 @@ public class extends VectorExpression { public int getOutputColumn() { return -1; } - - public int getColNum1() { - return colNum1; - } - - public void setColNum1(int colNum1) { - this.colNum1 = colNum1; - } - - public int getColNum2() { - return colNum2; - } - - public void setColNum2(int colNum2) { - this.colNum2 = colNum2; - } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt index 34d0438..bdd39b9 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt @@ -20,9 +20,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import 
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 
 /**
  * This is a generated class to evaluate a comparison on a vector of decimal
@@ -33,9 +35,9 @@ public class <ClassName> extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private Decimal128 value;
+  private HiveDecimal value;
 
-  public <ClassName>(int colNum, Decimal128 value) {
+  public <ClassName>(int colNum, HiveDecimal value) {
     this.colNum = colNum;
     this.value = value;
   }
@@ -52,7 +54,7 @@ public class <ClassName> extends VectorExpression {
     int[] sel = batch.selected;
     boolean[] nullPos = inputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -63,7 +65,7 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector.isRepeating) {
 
       // All must be selected otherwise size would be zero. Repeating property will not change.
-      if (!(vector[0].compareTo(value) <OperatorSymbol> 0)) {
+      if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
 
         // Entire batch is filtered out.
         batch.size = 0;
@@ -72,7 +74,7 @@ public class <ClassName> extends VectorExpression {
       int newSize = 0;
       for(int j = 0; j != n; j++) {
         int i = sel[j];
-        if (vector[i].compareTo(value) <OperatorSymbol> 0) {
+        if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
           sel[newSize++] = i;
         }
       }
@@ -80,7 +82,7 @@ public class <ClassName> extends VectorExpression {
     } else {
       int newSize = 0;
       for(int i = 0; i != n; i++) {
-        if (vector[i].compareTo(value) <OperatorSymbol> 0) {
+        if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
           sel[newSize++] = i;
         }
       }
@@ -94,7 +96,7 @@ public class <ClassName> extends VectorExpression {
 
       // All must be selected otherwise size would be zero. Repeating property will not change.
       if (!nullPos[0]) {
-        if (!(vector[0].compareTo(value) <OperatorSymbol> 0)) {
+        if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
 
           // Entire batch is filtered out.
           batch.size = 0;
@@ -107,7 +109,7 @@
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           if (!nullPos[i]) {
-            if (vector[i].compareTo(value) <OperatorSymbol> 0) {
+            if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
               sel[newSize++] = i;
             }
           }
@@ -119,7 +121,7 @@
         int newSize = 0;
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            if (vector[i].compareTo(value) <OperatorSymbol> 0) {
+            if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
               sel[newSize++] = i;
             }
           }
@@ -142,22 +144,6 @@
     return "boolean";
   }
 
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public Decimal128 getValue() {
-    return value;
-  }
-
-  public void setValue(Decimal128 value) {
-    this.value = value;
-  }
-
   @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
     return (new VectorExpressionDescriptor.Builder())
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
index e3b7307..0608016 100644
--- ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
+++ ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
@@ -20,9 +20,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 
 /**
  * This is a generated class to evaluate a comparison on a vector of decimal
@@ -33,9 +35,9 @@ public class <ClassName> extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private Decimal128 value;
+  private HiveDecimal value;
 
-  public <ClassName>(Decimal128 value, int colNum) {
+  public <ClassName>(HiveDecimal value, int colNum) {
     this.colNum = colNum;
     this.value = value;
   }
@@ -52,7 +54,7 @@ public class <ClassName> extends VectorExpression {
     int[] sel = batch.selected;
     boolean[] nullPos = inputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -63,7 +65,7 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector.isRepeating) {
 
       // All must be selected otherwise size would be zero. Repeating property will not change.
-      if (!(value.compareTo(vector[0]) <OperatorSymbol> 0)) {
+      if (!(DecimalUtil.compare(value, vector[0]) <OperatorSymbol> 0)) {
 
         // Entire batch is filtered out.
         batch.size = 0;
@@ -72,7 +74,7 @@ public class <ClassName> extends VectorExpression {
       int newSize = 0;
       for(int j = 0; j != n; j++) {
         int i = sel[j];
-        if (value.compareTo(vector[i]) <OperatorSymbol> 0) {
+        if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
           sel[newSize++] = i;
         }
       }
@@ -80,7 +82,7 @@ public class <ClassName> extends VectorExpression {
     } else {
       int newSize = 0;
       for(int i = 0; i != n; i++) {
-        if (value.compareTo(vector[i]) <OperatorSymbol> 0) {
+        if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
           sel[newSize++] = i;
         }
       }
@@ -94,7 +96,7 @@ public class <ClassName> extends VectorExpression {
 
       // All must be selected otherwise size would be zero. Repeating property will not change.
if (!nullPos[0]) { - if (!(value.compareTo(vector[0]) 0)) { + if (!(DecimalUtil.compare(value, vector[0]) 0)) { // Entire batch is filtered out. batch.size = 0; @@ -107,7 +109,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { int i = sel[j]; if (!nullPos[i]) { - if (value.compareTo(vector[i]) 0) { + if (DecimalUtil.compare(value, vector[i]) 0) { sel[newSize++] = i; } } @@ -119,7 +121,7 @@ public class extends VectorExpression { int newSize = 0; for(int i = 0; i != n; i++) { if (!nullPos[i]) { - if (value.compareTo(vector[i]) 0) { + if (DecimalUtil.compare(value, vector[i]) 0) { sel[newSize++] = i; } } @@ -142,22 +144,6 @@ public class extends VectorExpression { return "boolean"; } - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getValue() { - return value; - } - - public void setValue(Decimal128 value) { - this.value = value; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) diff --git ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt index 967e0d4..ea55bec 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt @@ -24,7 +24,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; /** * Generated from template ScalarArithmeticColumnDecimal.txt, which covers binary arithmetic @@ -35,10 +36,10 @@ public class extends VectorExpression { private static final long serialVersionUID = 1L; private int colNum; - private Decimal128 value; + private HiveDecimal value; private int outputColumn; - public (Decimal128 value, int colNum, int outputColumn) { + public (HiveDecimal value, int colNum, int outputColumn) { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; @@ -64,7 +65,7 @@ public class extends VectorExpression { outputColVector.noNulls = inputColVector.noNulls; outputColVector.isRepeating = inputColVector.isRepeating; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -126,26 +127,6 @@ public class extends VectorExpression { public int getOutputColumn() { return outputColumn; } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getValue() { - return value; - } - - public void setValue(Decimal128 value) { - this.value = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { diff --git ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt index 84ed925..c8a5d17 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt 
+++ ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt @@ -24,7 +24,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; /** * Generated from template ScalarDivideColumnDecimal.txt, which covers binary arithmetic @@ -35,10 +36,10 @@ public class extends VectorExpression { private static final long serialVersionUID = 1L; private int colNum; - private Decimal128 value; + private HiveDecimal value; private int outputColumn; - public (Decimal128 value, int colNum, int outputColumn) { + public (HiveDecimal value, int colNum, int outputColumn) { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; @@ -64,8 +65,8 @@ public class extends VectorExpression { outputColVector.noNulls = inputColVector.noNulls; outputColVector.isRepeating = inputColVector.isRepeating; int n = batch.size; - Decimal128[] vector = inputColVector.vector; - Decimal128[] outputVector = outputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; + HiveDecimalWritable[] outputVector = outputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -129,26 +130,6 @@ public class extends VectorExpression { return outputColumn; } - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public Decimal128 getValue() { - return value; - } - - public void setValue(Decimal128 value) { - this.value = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) diff --git ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt index ea3666a..6912ced 100644 --- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt +++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; @@ -31,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.util.JavaDataModel; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; /** @@ -49,7 +49,7 @@ public class extends VectorAggregateExpression { private static final long serialVersionUID = 1L; - transient private final Decimal128 value; + transient private final HiveDecimalWritable value; /** * Value is explicitly (re)initialized in reset() @@ -57,15 +57,16 @@ public class extends VectorAggregateExpression { transient private boolean isNull = true; public Aggregation() { - value = new Decimal128(); + value = new HiveDecimalWritable(); } - public 
void checkValue(Decimal128 value, short scale) { + public void checkValue(HiveDecimalWritable writable, short scale) { + HiveDecimal value = writable.getHiveDecimal(); if (isNull) { isNull = false; - this.value.update(value); - } else if (this.value.compareTo(value) 0) { - this.value.update(value, scale); + this.value.set(value); + } else if (this.value.getHiveDecimal().compareTo(value) 0) { + this.value.set(value); } } @@ -77,7 +78,7 @@ public class extends VectorAggregateExpression { @Override public void reset () { isNull = true; - value.zeroClear(); + value.set(HiveDecimal.ZERO); } } @@ -124,7 +125,7 @@ public class extends VectorAggregateExpression { DecimalColumnVector inputVector = (DecimalColumnVector)batch. cols[this.inputExpression.getOutputColumn()]; - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.noNulls) { if (inputVector.isRepeating) { @@ -170,7 +171,7 @@ public class extends VectorAggregateExpression { private void iterateNoNullsRepeatingWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128 value, + HiveDecimalWritable value, short scale, int batchSize) { @@ -186,7 +187,7 @@ public class extends VectorAggregateExpression { private void iterateNoNullsSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128[] values, + HiveDecimalWritable[] values, short scale, int[] selection, int batchSize) { @@ -203,7 +204,7 @@ public class extends VectorAggregateExpression { private void iterateNoNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128[] values, + HiveDecimalWritable[] values, short scale, int batchSize) { for (int i=0; i < batchSize; ++i) { @@ -218,7 +219,7 @@ public class extends VectorAggregateExpression { private void iterateHasNullsRepeatingSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128 value, + HiveDecimalWritable value, short scale, int batchSize, int[] selection, @@ -239,7 +240,7 @@ public class extends VectorAggregateExpression { private void iterateHasNullsRepeatingWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128 value, + HiveDecimalWritable value, short scale, int batchSize, boolean[] isNull) { @@ -258,7 +259,7 @@ public class extends VectorAggregateExpression { private void iterateHasNullsSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128[] values, + HiveDecimalWritable[] values, short scale, int batchSize, int[] selection, @@ -279,7 +280,7 @@ public class extends VectorAggregateExpression { private void iterateHasNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregrateIndex, - Decimal128[] values, + HiveDecimalWritable[] values, short scale, int batchSize, boolean[] isNull) { @@ -312,13 +313,14 @@ public class extends VectorAggregateExpression { Aggregation myagg = (Aggregation)agg; - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.isRepeating) { if (inputVector.noNulls && (myagg.isNull || (myagg.value.compareTo(vector[0]) 0))) { myagg.isNull = false; - myagg.value.update(vector[0], inputVector.scale); + HiveDecimal value = vector[0].getHiveDecimal(); + myagg.value.set(value); } return; } @@ -341,7 +343,7 @@ 
public class <ClassName> extends VectorAggregateExpression {
     private void iterateSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         boolean[] isNull,
@@ -350,13 +352,13 @@ public class <ClassName> extends VectorAggregateExpression {
       for (int j=0; j< batchSize; ++j) {
         int i = selected[j];
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
+          HiveDecimal value = vector[i].getHiveDecimal();
           if (myagg.isNull) {
             myagg.isNull = false;
-            myagg.value.update(value);
+            myagg.value.set(value);
           }
-          else if (myagg.value.compareTo(value) <OperatorSymbol> 0) {
-            myagg.value.update(value, scale);
+          else if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
+            myagg.value.set(value);
           }
         }
       }
@@ -364,40 +366,41 @@ public class <ClassName> extends VectorAggregateExpression {
     private void iterateSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         int[] selected) {
 
       if (myagg.isNull) {
-        myagg.value.update(vector[selected[0]]);
+        HiveDecimal value = vector[selected[0]].getHiveDecimal();
+        myagg.value.set(value);
         myagg.isNull = false;
       }
 
       for (int i=0; i< batchSize; ++i) {
-        Decimal128 value = vector[selected[i]];
-        if (myagg.value.compareTo(value) <OperatorSymbol> 0) {
-          myagg.value.update(value, scale);
+        HiveDecimal value = vector[selected[i]].getHiveDecimal();
+        if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
+          myagg.value.set(value);
         }
       }
     }
 
     private void iterateNoSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         boolean[] isNull) {
 
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
+          HiveDecimal value = vector[i].getHiveDecimal();
           if (myagg.isNull) {
             myagg.isNull = false;
-            myagg.value.update(value);
+            myagg.value.set(value);
           }
-          else if (myagg.value.compareTo(value) <OperatorSymbol> 0) {
-            myagg.value.update(value, scale);
+          else if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
+            myagg.value.set(value);
           }
         }
       }
@@ -405,18 +408,19 @@ public class <ClassName> extends VectorAggregateExpression {
     private void iterateNoSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize) {
 
       if (myagg.isNull) {
-        myagg.value.update(vector[0]);
+        HiveDecimal value = vector[0].getHiveDecimal();
+        myagg.value.set(value);
         myagg.isNull = false;
       }
 
       for (int i=0;i<batchSize;++i) {
-        Decimal128 value = vector[i];
-        if (myagg.value.compareTo(value) <OperatorSymbol> 0) {
-          myagg.value.update(value, scale);
+        HiveDecimal value = vector[i].getHiveDecimal();
+        if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
+          myagg.value.set(value);
         }
       }
     }
diff --git ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
index 048c354..8fc94ba 100644
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
 
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
@@ -86,12 +86,12 @@ public class <ClassName> extends VectorAggregateExpression {
       variance = 0f;
     }
 
-    public void updateValueWithCheckAndInit(Decimal128 value, short scale) {
+    public void updateValueWithCheckAndInit(HiveDecimalWritable value, short scale) {
       if (this.isNull) {
         this.init();
       }
-      double dval = value.doubleValue();
+      double dval = value.getHiveDecimal().doubleValue();
       this.sum += dval;
       this.count += 1;
       if(this.count > 1) {
@@ -100,8 +100,8 @@ public class <ClassName> extends VectorAggregateExpression {
-    public void
updateValueNoCheck(Decimal128 value, short scale) { - double dval = value.doubleValue(); + public void updateValueNoCheck(HiveDecimalWritable value, short scale) { + double dval = value.getHiveDecimal().doubleValue(); this.sum += dval; this.count += 1; double t = this.count*dval - this.sum; @@ -176,7 +176,7 @@ public class extends VectorAggregateExpression { return; } - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.isRepeating) { if (inputVector.noNulls || !inputVector.isNull[0]) { @@ -209,7 +209,7 @@ public class extends VectorAggregateExpression { private void iterateRepeatingNoNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregateIndex, - Decimal128 value, + HiveDecimalWritable value, short scale, int batchSize) { @@ -225,7 +225,7 @@ public class extends VectorAggregateExpression { private void iterateSelectionHasNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregateIndex, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, boolean[] isNull, @@ -238,7 +238,7 @@ public class extends VectorAggregateExpression { j); int i = selected[j]; if (!isNull[i]) { - Decimal128 value = vector[i]; + HiveDecimalWritable value = vector[i]; myagg.updateValueWithCheckAndInit(value, scale); } } @@ -247,7 +247,7 @@ public class extends VectorAggregateExpression { private void iterateSelectionNoNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregateIndex, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, int[] selected) { @@ -257,7 +257,7 @@ public class extends VectorAggregateExpression { aggregationBufferSets, aggregateIndex, i); - Decimal128 value = vector[selected[i]]; + HiveDecimalWritable value = vector[selected[i]]; myagg.updateValueWithCheckAndInit(value, scale); } } @@ -265,7 +265,7 @@ public class extends VectorAggregateExpression { private void iterateNoSelectionHasNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregateIndex, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, boolean[] isNull) { @@ -276,7 +276,7 @@ public class extends VectorAggregateExpression { aggregationBufferSets, aggregateIndex, i); - Decimal128 value = vector[i]; + HiveDecimalWritable value = vector[i]; myagg.updateValueWithCheckAndInit(value, scale); } } @@ -285,7 +285,7 @@ public class extends VectorAggregateExpression { private void iterateNoSelectionNoNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int aggregateIndex, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize) { @@ -294,7 +294,7 @@ public class extends VectorAggregateExpression { aggregationBufferSets, aggregateIndex, i); - Decimal128 value = vector[i]; + HiveDecimalWritable value = vector[i]; myagg.updateValueWithCheckAndInit(value, scale); } } @@ -316,7 +316,7 @@ public class extends VectorAggregateExpression { Aggregation myagg = (Aggregation)agg; - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.isRepeating) { if (inputVector.noNulls) { @@ -340,7 +340,7 @@ public class extends VectorAggregateExpression { private void iterateRepeatingNoNulls( Aggregation myagg, - Decimal128 value, + HiveDecimalWritable value, short scale, int batchSize) { @@ -357,7 +357,7 @@ public class extends VectorAggregateExpression { 
private void iterateSelectionHasNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, boolean[] isNull, @@ -366,7 +366,7 @@ public class extends VectorAggregateExpression { for (int j=0; j< batchSize; ++j) { int i = selected[j]; if (!isNull[i]) { - Decimal128 value = vector[i]; + HiveDecimalWritable value = vector[i]; myagg.updateValueWithCheckAndInit(value, scale); } } @@ -374,7 +374,7 @@ public class extends VectorAggregateExpression { private void iterateSelectionNoNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, int[] selected) { @@ -383,7 +383,7 @@ public class extends VectorAggregateExpression { myagg.init (); } - Decimal128 value = vector[selected[0]]; + HiveDecimalWritable value = vector[selected[0]]; myagg.updateValueWithCheckAndInit(value, scale); // i=0 was pulled out to remove the count > 1 check in the loop @@ -396,14 +396,14 @@ public class extends VectorAggregateExpression { private void iterateNoSelectionHasNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize, boolean[] isNull) { for(int i=0;i extends VectorAggregateExpression { private void iterateNoSelectionNoNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, short scale, int batchSize) { @@ -419,7 +419,7 @@ public class extends VectorAggregateExpression { myagg.init (); } - Decimal128 value = vector[0]; + HiveDecimalWritable value = vector[0]; myagg.updateValueWithCheckAndInit(value, scale); // i=0 was pulled out to remove count > 1 check diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/DecimalColumnVector.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/DecimalColumnVector.java index 8672922..5009a42 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/DecimalColumnVector.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/DecimalColumnVector.java @@ -17,26 +17,25 @@ */ package org.apache.hadoop.hive.ql.exec.vector; -import org.apache.hadoop.hive.common.type.Decimal128; + +import java.math.BigInteger; + +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Writable; public class DecimalColumnVector extends ColumnVector { /** - * A vector if Decimal128 objects. These are mutable and have fairly - * efficient operations on them. This will make it faster to load - * column vectors and perform decimal vector operations with decimal- - * specific VectorExpressions. + * A vector of HiveDecimalWritable objects. * * For high performance and easy access to this low-level structure, * the fields are public by design (as they are in other ColumnVector * types). 
*/ - public Decimal128[] vector; + public HiveDecimalWritable[] vector; public short scale; public short precision; @@ -51,9 +50,9 @@ public DecimalColumnVector(int size, int precision, int scale) { this.precision = (short) precision; this.scale = (short) scale; final int len = size; - vector = new Decimal128[len]; + vector = new HiveDecimalWritable[len]; for (int i = 0; i < len; i++) { - vector[i] = new Decimal128(0, this.scale); + vector[i] = new HiveDecimalWritable(HiveDecimal.ZERO); } } @@ -65,8 +64,7 @@ public Writable getWritableObject(int index) { if (!noNulls && isNull[index]) { return NullWritable.get(); } else { - Decimal128 dec = vector[index]; - writableObj.set(HiveDecimal.create(dec.toBigDecimal())); + writableObj.set(vector[index]); return writableObj; } } @@ -78,22 +76,38 @@ public void flatten(boolean selectedInUse, int[] sel, int size) { @Override public void setElement(int outElementNum, int inputElementNum, ColumnVector inputVector) { - vector[outElementNum].update(((DecimalColumnVector) inputVector).vector[inputElementNum]); - vector[outElementNum].changeScaleDestructive(scale); + HiveDecimal hiveDec = ((DecimalColumnVector) inputVector).vector[inputElementNum].getHiveDecimal(precision, scale); + if (hiveDec == null) { + noNulls = false; + isNull[outElementNum] = true; + } else { + vector[outElementNum].set(hiveDec); + } } - /** - * Check if the value at position i fits in the available precision, - * and convert the value to NULL if it does not. - */ - public void checkPrecisionOverflow(int i) { - try { - vector[i].checkPrecisionOverflow(precision); - } catch (ArithmeticException e) { + public void set(int elementNum, HiveDecimalWritable writeable) { + HiveDecimal hiveDec = writeable.getHiveDecimal(precision, scale); + if (hiveDec == null) { + noNulls = false; + isNull[elementNum] = true; + } else { + vector[elementNum].set(hiveDec); + } + } - // If the value won't fit in the available precision, the result is NULL + public void set(int elementNum, HiveDecimal hiveDec) { + HiveDecimal checkedDec = HiveDecimal.enforcePrecisionScale(hiveDec, precision, scale); + if (checkedDec == null) { noNulls = false; - isNull[i] = true; + isNull[elementNum] = true; + } else { + vector[elementNum].set(checkedDec); } } + + public void setNullDataValue(int elementNum) { + // E.g. 
For scale 2 the minimum is "0.01" + HiveDecimal minimumNonZeroValue = HiveDecimal.create(BigInteger.ONE, scale); + vector[elementNum].set(minimumNonZeroValue); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java index 14ef79e..24b34b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -152,18 +151,12 @@ public void reset() { private static abstract class VectorDecimalColumnAssign extends VectorColumnAssignVectorBase { + protected void assignDecimal(HiveDecimal value, int index) { - outCol.vector[index].update(value.unscaledValue(), (byte) value.scale()); - } - - protected void assignDecimal(Decimal128 value, int index) { - outCol.vector[index].update(value); + outCol.set(index, value); } protected void assignDecimal(HiveDecimalWritable hdw, int index) { - byte[] internalStorage = hdw.getInternalStorage(); - int scale = hdw.getScale(); - - outCol.vector[index].fastUpdateFromInternalStorage(internalStorage, (short)scale); + outCol.set(index, hdw); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java index 96c4498..fabac38 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java @@ -113,9 +113,9 @@ public void copyGroupKey(VectorizedRowBatch inputBatch, VectorizedRowBatch outpu DecimalColumnVector outputColumnVector = (DecimalColumnVector) outputBatch.cols[keyIndex]; if (inputColumnVector.noNulls || !inputColumnVector.isNull[0]) { - // Since we store references to Decimal128 instances, we must use the update method instead + // Since we store references to HiveDecimalWritable instances, we must use the update method instead // of plain assignment. 
- outputColumnVector.vector[outputBatch.size].update(inputColumnVector.vector[0]); + outputColumnVector.set(outputBatch.size, inputColumnVector.vector[0]); } else { outputColumnVector.noNulls = false; outputColumnVector.isNull[outputBatch.size] = true; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java index 1c366df..91480af 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java @@ -20,7 +20,8 @@ import java.util.Arrays; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.KeyWrapper; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +41,7 @@ private static final long[] EMPTY_LONG_ARRAY = new long[0]; private static final double[] EMPTY_DOUBLE_ARRAY = new double[0]; private static final byte[][] EMPTY_BYTES_ARRAY = new byte[0][]; - private static final Decimal128[] EMPTY_DECIMAL_ARRAY = new Decimal128[0]; + private static final HiveDecimalWritable[] EMPTY_DECIMAL_ARRAY = new HiveDecimalWritable[0]; private long[] longValues; private double[] doubleValues; @@ -49,7 +50,7 @@ private int[] byteStarts; private int[] byteLengths; - private Decimal128[] decimalValues; + private HiveDecimalWritable[] decimalValues; private boolean[] isNull; private int hashcode; @@ -58,9 +59,9 @@ public VectorHashKeyWrapper(int longValuesCount, int doubleValuesCount, int byteValuesCount, int decimalValuesCount) { longValues = longValuesCount > 0 ? new long[longValuesCount] : EMPTY_LONG_ARRAY; doubleValues = doubleValuesCount > 0 ? new double[doubleValuesCount] : EMPTY_DOUBLE_ARRAY; - decimalValues = decimalValuesCount > 0 ? new Decimal128[decimalValuesCount] : EMPTY_DECIMAL_ARRAY; + decimalValues = decimalValuesCount > 0 ? new HiveDecimalWritable[decimalValuesCount] : EMPTY_DECIMAL_ARRAY; for(int i = 0; i < decimalValuesCount; ++i) { - decimalValues[i] = new Decimal128(); + decimalValues[i] = new HiveDecimalWritable(HiveDecimal.ZERO); } if (byteValuesCount > 0) { byteValues = new byte[byteValuesCount][]; @@ -87,9 +88,12 @@ public void getNewKey(Object row, ObjectInspector rowInspector) throws HiveExcep public void setHashKey() { hashcode = Arrays.hashCode(longValues) ^ Arrays.hashCode(doubleValues) ^ - Arrays.hashCode(decimalValues) ^ Arrays.hashCode(isNull); + for (int i = 0; i < decimalValues.length; i++) { + hashcode ^= decimalValues[i].getHiveDecimal().hashCode(); + } + // This code, with branches and all, is not executed if there are no string keys for (int i = 0; i < byteValues.length; ++i) { /* @@ -165,10 +169,13 @@ public void duplicateTo(VectorHashKeyWrapper clone) { clone.doubleValues = doubleValues.clone(); clone.isNull = isNull.clone(); - // Decimal128 requires deep clone - clone.decimalValues = new Decimal128[decimalValues.length]; + // Decimal columns use HiveDecimalWritable and we set the value. + clone.decimalValues = new HiveDecimalWritable[decimalValues.length]; for(int i = 0; i < decimalValues.length; ++i) { - clone.decimalValues[i] = new Decimal128().update(decimalValues[i]); + clone.decimalValues[i] = new HiveDecimalWritable(); + + // Note that the set routine will enforce precision and scale. 
+ clone.decimalValues[i].set(decimalValues[i]); } clone.byteValues = new byte[byteValues.length][]; @@ -234,8 +241,8 @@ public void assignNullString(int index) { isNull[longValues.length + doubleValues.length + index] = true; } - public void assignDecimal(int index, Decimal128 value) { - decimalValues[index].update(value); + public void assignDecimal(int index, HiveDecimalWritable value) { + decimalValues[index].set(value); isNull[longValues.length + doubleValues.length + byteValues.length + index] = false; } @@ -299,7 +306,7 @@ public boolean getIsDecimalNull(int i) { return isNull[longValues.length + doubleValues.length + byteValues.length + i]; } - public Decimal128 getDecimal(int i) { + public HiveDecimalWritable getDecimal(int i) { return decimalValues[i]; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java index 7bda38f..6333222 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java @@ -580,7 +580,7 @@ public Object getWritableKeyValue(VectorHashKeyWrapper kw, int i, } else if (klh.decimalIndex >= 0) { return kw.getIsDecimalNull(klh.decimalIndex)? null : keyOutputWriter.writeValue( - kw.getDecimal(klh.decimalIndex)); + kw.getDecimal(klh.decimalIndex).getHiveDecimal()); } else { throw new HiveException(String.format( diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 42ad37d..dbc9bb2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.lang.reflect.Constructor; +import java.math.BigDecimal; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; @@ -34,7 +35,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -868,7 +869,7 @@ private VectorExpression getConstantVectorExpression(Object constantValue, TypeI case FLOAT_FAMILY: return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue()); case DECIMAL: - VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue); + VectorExpression ve = new ConstantVectorExpression(outCol, (HiveDecimal) constantValue); // Set type name with decimal precision, scale, etc. ve.setOutputType(typeName); return ve; @@ -1237,9 +1238,9 @@ private VectorExpression getInExpression(List childExpr, Mode mode ((IDoubleInExpr) expr).setInListValues(inValsD); } else if (isDecimalFamily(colType)) { cl = (mode == Mode.FILTER ? 
FilterDecimalColumnInList.class : DecimalColumnInList.class); - Decimal128[] inValsD = new Decimal128[childrenForInList.size()]; + HiveDecimal[] inValsD = new HiveDecimal[childrenForInList.size()]; for (int i = 0; i != inValsD.length; i++) { - inValsD[i] = (Decimal128) getVectorTypeScalarValue( + inValsD[i] = (HiveDecimal) getVectorTypeScalarValue( (ExprNodeConstantDesc) childrenForInList.get(i)); } expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, returnType); @@ -1287,44 +1288,43 @@ private VectorExpression getGenericUDFBridgeVectorExpression(GenericUDFBridge ud return null; } - private Decimal128 castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException { + private HiveDecimal castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException { PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type; - String typename = type.getTypeName(); - Decimal128 d = new Decimal128(); int scale = HiveDecimalUtils.getScaleForType(ptinfo); + String typename = type.getTypeName(); + HiveDecimal rawDecimal; switch (ptinfo.getPrimitiveCategory()) { case FLOAT: - float floatVal = ((Float) scalar).floatValue(); - d.update(floatVal, (short) scale); + rawDecimal = HiveDecimal.create(String.valueOf((Float) scalar)); break; case DOUBLE: - double doubleVal = ((Double) scalar).doubleValue(); - d.update(doubleVal, (short) scale); + rawDecimal = HiveDecimal.create(String.valueOf((Double) scalar)); break; case BYTE: - byte byteVal = ((Byte) scalar).byteValue(); - d.update(byteVal, (short) scale); + rawDecimal = HiveDecimal.create((Byte) scalar); break; case SHORT: - short shortVal = ((Short) scalar).shortValue(); - d.update(shortVal, (short) scale); + rawDecimal = HiveDecimal.create((Short) scalar); break; case INT: - int intVal = ((Integer) scalar).intValue(); - d.update(intVal, (short) scale); + rawDecimal = HiveDecimal.create((Integer) scalar); break; case LONG: - long longVal = ((Long) scalar).longValue(); - d.update(longVal, (short) scale); + rawDecimal = HiveDecimal.create((Long) scalar); break; case DECIMAL: - HiveDecimal decimalVal = (HiveDecimal) scalar; - d.update(decimalVal.unscaledValue(), (short) scale); + rawDecimal = (HiveDecimal) scalar; break; default: - throw new HiveException("Unsupported type "+typename+" for cast to Decimal128"); + throw new HiveException("Unsupported type " + typename + " for cast to HiveDecimal"); + } + if (rawDecimal == null) { + if (LOG.isDebugEnabled()) { + LOG.debug("Casting constant scalar " + scalar + " to HiveDecimal resulted in null"); + } + return null; } - return d; + return rawDecimal; } private String castConstantToString(Object scalar, TypeInfo type) throws HiveException { @@ -1391,7 +1391,7 @@ private VectorExpression getCastToDecimal(List childExpr, TypeInfo if (child instanceof ExprNodeConstantDesc) { // Return a constant vector expression Object constantValue = ((ExprNodeConstantDesc) child).getValue(); - Decimal128 decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo()); + HiveDecimal decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo()); return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION); } else if (child instanceof ExprNodeNullDesc) { return getConstantVectorExpression(null, returnType, Mode.PROJECTION); @@ -1801,10 +1801,7 @@ private Object getScalarValue(ExprNodeConstantDesc constDesc) return 0; } } else if (decimalTypePattern.matcher(constDesc.getTypeString()).matches()) { - HiveDecimal hd = (HiveDecimal) constDesc.getValue(); - Decimal128 dvalue = 
new Decimal128(); - dvalue.update(hd.unscaledValue(), (short) hd.scale()); - return dvalue; + return (HiveDecimal) constDesc.getValue(); } else { return constDesc.getValue(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java index 7b9c0a7..e304cf8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java @@ -481,8 +481,7 @@ private static void setVector(Object row, if (writableCol != null) { dcv.isNull[rowIndex] = false; HiveDecimalWritable wobj = (HiveDecimalWritable) writableCol; - dcv.vector[rowIndex].update(wobj.getHiveDecimal().unscaledValue(), - (short) wobj.getScale()); + dcv.set(rowIndex, wobj); } else { setNullColIsNullValue(dcv, rowIndex); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 4f57aac..250236e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -34,7 +34,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -557,7 +556,7 @@ public void addPartitionColsToBatch(VectorizedRowBatch batch) throws HiveExcepti dv.isRepeating = true; } else { HiveDecimal hd = (HiveDecimal) value; - dv.vector[0] = new Decimal128(hd.toString(), (short) hd.scale()); + dv.set(0, hd); dv.isRepeating = true; dv.isNull[0] = false; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java index 7bbe153..9621cd3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java @@ -41,6 +41,6 @@ public CastDecimalToBoolean(int inputColumn, int outputColumn) { * Otherwise, return 1 for true. */ protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) { - outV.vector[i] = inV.vector[i].getSignum() == 0 ? 0 : 1; + outV.vector[i] = inV.vector[i].getHiveDecimal().signum() == 0 ? 0 : 1; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java index fa0143f..ea235d9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java @@ -52,9 +52,8 @@ public CastDecimalToDecimal() { * at position i in the respective vectors. */ protected void convert(DecimalColumnVector outV, DecimalColumnVector inV, int i) { - outV.vector[i].update(inV.vector[i]); - outV.vector[i].changeScaleDestructive(outV.scale); - outV.checkPrecisionOverflow(i); + // The set routine enforces precision and scale. 
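+ // DecimalColumnVector.set(i, value) applies HiveDecimal.enforcePrecisionScale for the output type + // and marks the entry null when the value cannot be represented, so the old + // checkPrecisionOverflow call is no longer needed here.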
+ outV.set(i, inV.vector[i]); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java index 9cf97f4..63d878d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java @@ -34,6 +34,6 @@ public CastDecimalToDouble(int inputCol, int outputCol) { } protected void func(DoubleColumnVector outV, DecimalColumnVector inV, int i) { - outV.vector[i] = inV.vector[i].doubleValue(); + outV.vector[i] = inV.vector[i].getHiveDecimal().doubleValue(); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java index d5f34d5..045f0ab 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java @@ -37,6 +37,6 @@ public CastDecimalToLong(int inputColumn, int outputColumn) { @Override protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) { - outV.vector[i] = inV.vector[i].longValue(); + outV.vector[i] = inV.vector[i].getHiveDecimal().longValue(); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java index 6d01498..3378842 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalToStringUnaryUDF; /** * To support vectorized cast of decimal to string. @@ -43,7 +44,7 @@ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { @Override protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) { - String s = inV.vector[i].getHiveDecimalString(); + String s = inV.vector[i].getHiveDecimal().toString(); byte[] b = null; try { b = s.getBytes("UTF-8"); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java index 0a445f4..a52cf19 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; -import org.apache.hadoop.hive.common.type.SqlMathUtil; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -31,34 +30,23 @@ public class CastDecimalToTimestamp extends FuncDecimalToLong { private static final long serialVersionUID = 1L; - /* The field tmp is a scratch variable for this operation.
It is - * purposely not made static because if this code is ever made multi-threaded, - * each thread will then have its own VectorExpression tree and thus - * its own copy of the variable. - */ - private transient Decimal128 tmp = null; - private static transient Decimal128 tenE9 = new Decimal128(1000000000); + private static transient HiveDecimal tenE9 = HiveDecimal.create(1000000000); public CastDecimalToTimestamp(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); - tmp = new Decimal128(0); } public CastDecimalToTimestamp() { - - // initialize local field after deserialization - tmp = new Decimal128(0); } @Override protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) { - tmp.update(inV.vector[i]); - - // Reduce scale at most by 9, therefore multiplication will not require rounding. - int newScale = inV.scale > 9 ? (inV.scale - 9) : 0; - tmp.multiplyDestructive(tenE9, (short) newScale); - - // set output - outV.vector[i] = tmp.longValue(); + HiveDecimal result = inV.vector[i].getHiveDecimal().multiply(tenE9); + if (result == null) { + outV.noNulls = false; + outV.isNull[i] = true; + } else { + outV.vector[i] = result.longValue(); + } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java index 36a1fcb..6d6b588 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -38,7 +39,7 @@ public CastDoubleToDecimal(int inputColumn, int outputColumn) { @Override protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) { - outV.vector[i].update(inV.vector[i], outV.scale); - outV.checkPrecisionOverflow(i); + String s = ((Double) inV.vector[i]).toString(); + outV.set(i, HiveDecimal.create(s)); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java index d1a4977..ba8bcae 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -40,7 +41,6 @@ public CastLongToDecimal(int inputColumn, int outputColumn) { @Override protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) { - outV.vector[i].update(inV.vector[i], outV.scale); - outV.checkPrecisionOverflow(i); + outV.set(i, HiveDecimal.create(inV.vector[i])); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java index e58fc9a..504b354 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java +++
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -58,14 +59,13 @@ protected void func(DecimalColumnVector outV, BytesColumnVector inV, int i) { * making a new string. */ s = new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8"); - outV.vector[i].update(s, outV.scale); + outV.set(i, HiveDecimal.create(s)); } catch (Exception e) { // for any exception in conversion to decimal, produce NULL outV.noNulls = false; outV.isNull[i] = true; } - outV.checkPrecisionOverflow(i); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java index 0f9874b..0aedddc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -39,9 +40,10 @@ public CastTimestampToDecimal(int inputColumn, int outputColumn) { @Override protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) { - // the resulting decimal value is 10e-9 * the input long value. - outV.vector[i].updateFixedPoint(inV.vector[i], (short) 9); - outV.vector[i].changeScaleDestructive(outV.scale); - outV.checkPrecisionOverflow(i); + // The input long is a timestamp in nanoseconds; scaling it by 10^-9 gives the value in seconds.
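+ // For example, a nanosecond count of 1500000000 becomes + // HiveDecimal.create(1500000000).scaleByPowerOfTen(-9) = 1.5 seconds.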
+ // + HiveDecimal result = HiveDecimal.create(inV.vector[i]); + result = result.scaleByPowerOfTen(-9); + outV.set(i, result); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java index 23124c3..c76b15b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java @@ -20,7 +20,7 @@ import java.util.Arrays; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.vector.*; @@ -44,7 +44,7 @@ protected long longValue = 0; private double doubleValue = 0; private byte[] bytesValue = null; - private Decimal128 decimalValue = null; + private HiveDecimal decimalValue = null; private boolean isNullValue = false; private Type type; @@ -85,7 +85,7 @@ public ConstantVectorExpression(int outputColumn, HiveVarchar value) { setBytesValue(value.getValue().getBytes()); } - public ConstantVectorExpression(int outputColumn, Decimal128 value) { + public ConstantVectorExpression(int outputColumn, HiveDecimal value) { this(outputColumn, "decimal"); setDecimalValue(value); } @@ -137,7 +137,7 @@ private void evaluateDecimal(VectorizedRowBatch vrg) { dcv.isRepeating = true; dcv.noNulls = !isNullValue; if (!isNullValue) { - dcv.vector[0].update(decimalValue); + dcv.vector[0].set(decimalValue); } else { dcv.isNull[0] = true; } @@ -191,7 +191,7 @@ public void setBytesValue(byte[] bytesValue) { this.bytesValueLength = bytesValue.length; } - public void setDecimalValue(Decimal128 decimalValue) { + public void setDecimalValue(HiveDecimal decimalValue) { this.decimalValue = decimalValue; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java index 7756ebd..0601c66 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java @@ -18,11 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import java.util.HashSet; @@ -32,11 +33,11 @@ public class DecimalColumnInList extends VectorExpression implements IDecimalInExpr { private static final long serialVersionUID = 1L; private int inputCol; - private Decimal128[] inListValues; + private HiveDecimal[] inListValues; private int outputColumn; // The set object containing the IN list. 
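// It is transient: it is rebuilt lazily from inListValues on the first call to // evaluate(), so it does not need to survive serialization of the expression tree.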
- private transient HashSet<Decimal128> inSet; + private transient HashSet<HiveDecimal> inSet; public DecimalColumnInList() { super(); } @@ -60,8 +61,8 @@ public void evaluate(VectorizedRowBatch batch) { } if (inSet == null) { - inSet = new HashSet<Decimal128>(inListValues.length); - for (Decimal128 val : inListValues) { + inSet = new HashSet<HiveDecimal>(inListValues.length); + for (HiveDecimal val : inListValues) { inSet.add(val); } } @@ -72,7 +73,7 @@ public void evaluate(VectorizedRowBatch batch) { boolean[] nullPos = inputColVector.isNull; boolean[] outNulls = outputColVector.isNull; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; long[] outputVector = outputColVector.vector; // return immediately if batch is empty @@ -87,16 +88,16 @@ public void evaluate(VectorizedRowBatch batch) { // All must be selected otherwise size would be zero // Repeating property will not change. - outputVector[0] = inSet.contains(vector[0]) ? 1 : 0; + outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0; outputColVector.isRepeating = true; } else if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - outputVector[i] = inSet.contains(vector[i]) ? 1 : 0; + outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0; } } else { for(int i = 0; i != n; i++) { - outputVector[i] = inSet.contains(vector[i]) ? 1 : 0; + outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0; } } } else { @@ -105,7 +106,7 @@ public void evaluate(VectorizedRowBatch batch) { //All must be selected otherwise size would be zero //Repeating property will not change. if (!nullPos[0]) { - outputVector[0] = inSet.contains(vector[0]) ? 1 : 0; + outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0; outNulls[0] = false; } else { outNulls[0] = true; @@ -116,14 +117,14 @@ public void evaluate(VectorizedRowBatch batch) { int i = sel[j]; outNulls[i] = nullPos[i]; if (!nullPos[i]) { - outputVector[i] = inSet.contains(vector[i]) ? 1 : 0; + outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0; } } } else { System.arraycopy(nullPos, 0, outNulls, 0, n); for(int i = 0; i != n; i++) { if (!nullPos[i]) { - outputVector[i] = inSet.contains(vector[i]) ? 1 : 0; + outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ?
1 : 0; } } } @@ -148,11 +149,7 @@ public Descriptor getDescriptor() { return null; } - public Decimal128[] getInListValues() { - return this.inListValues; - } - - public void setInListValues(Decimal128[] a) { + public void setInListValues(HiveDecimal[] a) { this.inListValues = a; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java index cdc0ea0..d88f7d7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java @@ -18,32 +18,60 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveDecimal; -import org.apache.hadoop.hive.common.type.SqlMathUtil; -import org.apache.hadoop.hive.common.type.UnsignedInt128; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.udf.generic.RoundUtils; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; /** * Utility functions for vector operations on decimal values. */ public class DecimalUtil { - public static final Decimal128 DECIMAL_ONE = new Decimal128(); - private static final UnsignedInt128 scratchUInt128 = new UnsignedInt128(); + public static int compare(HiveDecimalWritable writableLeft, HiveDecimal right) { + return writableLeft.getHiveDecimal().compareTo(right); + } - static { - DECIMAL_ONE.update(1L, (short) 0); + public static int compare(HiveDecimal left, HiveDecimalWritable writableRight) { + return left.compareTo(writableRight.getHiveDecimal()); } // Addition with overflow check. Overflow produces NULL output. - public static void addChecked(int i, Decimal128 left, Decimal128 right, + public static void addChecked(int i, HiveDecimal left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.add(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void addChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().add(right.getHiveDecimal())); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void addChecked(int i, HiveDecimalWritable left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().add(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void addChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { - Decimal128.add(left, right, outputColVector.vector[i], outputColVector.scale); - outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision); + outputColVector.set(i, left.add(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; @@ -51,11 +79,40 @@ public static void addChecked(int i, Decimal128 left, Decimal128 right, } // Subtraction with overflow check. Overflow produces NULL output. 
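The subtract, multiply, divide, and modulo helpers that follow repeat this try/catch shape for all four combinations of HiveDecimal and HiveDecimalWritable operands. A minimal standalone sketch of the shared convention, using only classes already referenced in this patch; the wrapper class, the Op interface, and applyChecked are hypothetical names, not part of the patch:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

class CheckedDecimalOpSketch {
  interface Op {
    HiveDecimal apply(HiveDecimal left, HiveDecimal right);
  }

  // Store op(left, right) at row i; an ArithmeticException (overflow or
  // divide-by-zero) turns the row into SQL NULL instead of failing the query.
  static void applyChecked(int i, HiveDecimal left, HiveDecimal right,
      Op op, DecimalColumnVector outputColVector) {
    try {
      outputColVector.set(i, op.apply(left, right));
    } catch (ArithmeticException e) {
      outputColVector.noNulls = false;
      outputColVector.isNull[i] = true;
    }
  }
}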
- public static void subtractChecked(int i, Decimal128 left, Decimal128 right, + public static void subtractChecked(int i, HiveDecimal left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.subtract(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().subtract(right.getHiveDecimal())); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().subtract(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void subtractChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { - Decimal128.subtract(left, right, outputColVector.vector[i], outputColVector.scale); - outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision); + outputColVector.set(i, left.subtract(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; @@ -63,11 +120,40 @@ public static void subtractChecked(int i, Decimal128 left, Decimal128 right, } // Multiplication with overflow check. Overflow produces NULL output. - public static void multiplyChecked(int i, Decimal128 left, Decimal128 right, + public static void multiplyChecked(int i, HiveDecimal left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.multiply(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().multiply(right.getHiveDecimal())); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().multiply(right)); + } catch (ArithmeticException e) { // catch on overflow + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void multiplyChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { - Decimal128.multiply(left, right, outputColVector.vector[i], outputColVector.scale); - outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision); + outputColVector.set(i, left.multiply(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; @@ -75,11 +161,40 @@ public static void multiplyChecked(int i, Decimal128 left, Decimal128 right, } // Division with overflow/zero-divide check. Error produces NULL output. 
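// This is what gives vectorized decimal division its SQL semantics: an expression // such as key / 0 evaluates to NULL rather than raising an error, as exercised by // vector_decimal_udf.q below.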
- public static void divideChecked(int i, Decimal128 left, Decimal128 right, + public static void divideChecked(int i, HiveDecimal left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.divide(right)); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().divide(right.getHiveDecimal())); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().divide(right)); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void divideChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { - Decimal128.divide(left, right, outputColVector.vector[i], outputColVector.scale); - outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision); + outputColVector.set(i, left.divide(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; @@ -87,80 +202,138 @@ public static void divideChecked(int i, Decimal128 left, Decimal128 right, } // Modulo operator with overflow/zero-divide check. - public static void moduloChecked(int i, Decimal128 left, Decimal128 right, + public static void moduloChecked(int i, HiveDecimal left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.remainder(right)); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().remainder(right.getHiveDecimal())); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimal right, + DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, left.getHiveDecimal().remainder(right)); + } catch (ArithmeticException e) { // catch on error + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void moduloChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { - Decimal128.modulo(left, right, outputColVector.vector[i], outputColVector.scale); - outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision); + outputColVector.set(i, left.remainder(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; } } - public static void floor(int i, Decimal128 input, DecimalColumnVector outputColVector) { + public static void floor(int i, HiveDecimal input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_FLOOR)); + } catch 
(ArithmeticException e) { + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void floor(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_FLOOR)); + } catch (ArithmeticException e) { + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void ceiling(int i, HiveDecimal input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_CEILING)); + } catch (ArithmeticException e) { + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void ceiling(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_CEILING)); + } catch (ArithmeticException e) { + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } + } + + public static void round(int i, HiveDecimal input, DecimalColumnVector outputColVector) { try { - Decimal128 result = outputColVector.vector[i]; - result.update(input); - result.zeroFractionPart(scratchUInt128); - result.changeScaleDestructive(outputColVector.scale); - if ((result.compareTo(input) != 0) && input.getSignum() < 0) { - result.subtractDestructive(DECIMAL_ONE, outputColVector.scale); - } + outputColVector.set(i, RoundUtils.round(input, outputColVector.scale)); } catch (ArithmeticException e) { outputColVector.noNulls = false; outputColVector.isNull[i] = true; } } - public static void ceiling(int i, Decimal128 input, DecimalColumnVector outputColVector) { + public static void round(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) { try { - Decimal128 result = outputColVector.vector[i]; - result.update(input); - result.zeroFractionPart(scratchUInt128); - result.changeScaleDestructive(outputColVector.scale); - if ((result.compareTo(input) != 0) && input.getSignum() > 0) { - result.addDestructive(DECIMAL_ONE, outputColVector.scale); - } + outputColVector.set(i, RoundUtils.round(input.getHiveDecimal(), outputColVector.scale)); } catch (ArithmeticException e) { outputColVector.noNulls = false; outputColVector.isNull[i] = true; } } - public static void round(int i, Decimal128 input, DecimalColumnVector outputColVector) { - HiveDecimal inputHD = HiveDecimal.create(input.toBigDecimal()); - HiveDecimal result = RoundUtils.round(inputHD, outputColVector.scale); - if (result == null) { + public static void sign(int i, HiveDecimal input, LongColumnVector outputColVector) { + outputColVector.vector[i] = input.signum(); + } + + public static void sign(int i, HiveDecimalWritable input, LongColumnVector outputColVector) { + outputColVector.vector[i] = input.getHiveDecimal().signum(); + } + + public static void abs(int i, HiveDecimal input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.abs()); + } catch (ArithmeticException e) { outputColVector.noNulls = false; outputColVector.isNull[i] = true; - } else { - outputColVector.vector[i].update(result.bigDecimalValue().toPlainString(), outputColVector.scale); } } - public static void sign(int i, Decimal128 input, LongColumnVector outputColVector) { - outputColVector.vector[i] = input.getSignum(); + public static void abs(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) { + try { + outputColVector.set(i, input.getHiveDecimal().abs()); + } catch (ArithmeticException e) 
{ + outputColVector.noNulls = false; + outputColVector.isNull[i] = true; + } } - public static void abs(int i, Decimal128 input, DecimalColumnVector outputColVector) { - Decimal128 result = outputColVector.vector[i]; + public static void negate(int i, HiveDecimal input, DecimalColumnVector outputColVector) { try { - result.update(input); - result.absDestructive(); - result.changeScaleDestructive(outputColVector.scale); + outputColVector.set(i, input.negate()); } catch (ArithmeticException e) { outputColVector.noNulls = false; outputColVector.isNull[i] = true; } } - public static void negate(int i, Decimal128 input, DecimalColumnVector outputColVector) { - Decimal128 result = outputColVector.vector[i]; + public static void negate(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) { try { - result.update(input); - result.negateDestructive(); - result.changeScaleDestructive(outputColVector.scale); + outputColVector.set(i, input.getHiveDecimal().negate()); } catch (ArithmeticException e) { outputColVector.noNulls = false; outputColVector.isNull[i] = true; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java index 82f1787..a865343 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java @@ -18,10 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import java.util.HashSet; @@ -31,10 +32,10 @@ public class FilterDecimalColumnInList extends VectorExpression implements IDecimalInExpr { private static final long serialVersionUID = 1L; private int inputCol; - private Decimal128[] inListValues; + private HiveDecimal[] inListValues; // The set object containing the IN list. - private transient HashSet<Decimal128> inSet; + private transient HashSet<HiveDecimal> inSet; public FilterDecimalColumnInList() { super(); @@ -57,8 +58,8 @@ public void evaluate(VectorizedRowBatch batch) { } if (inSet == null) { - inSet = new HashSet<Decimal128>(inListValues.length); - for (Decimal128 val : inListValues) { + inSet = new HashSet<HiveDecimal>(inListValues.length); + for (HiveDecimal val : inListValues) { inSet.add(val); } } @@ -67,7 +68,7 @@ public void evaluate(VectorizedRowBatch batch) { int[] sel = batch.selected; boolean[] nullPos = inputColVector.isNull; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -80,7 +81,7 @@ public void evaluate(VectorizedRowBatch batch) { // All must be selected otherwise size would be zero // Repeating property will not change. - if (!(inSet.contains(vector[0]))) { + if (!(inSet.contains(vector[0].getHiveDecimal()))) { //Entire batch is filtered out.
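// Setting batch.size to 0 lets every downstream operator skip the batch // without examining the selected array.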
batch.size = 0; } @@ -88,7 +89,7 @@ public void evaluate(VectorizedRowBatch batch) { int newSize = 0; for(int j = 0; j != n; j++) { int i = sel[j]; - if (inSet.contains(vector[i])) { + if (inSet.contains(vector[i].getHiveDecimal())) { sel[newSize++] = i; } } @@ -96,7 +97,7 @@ public void evaluate(VectorizedRowBatch batch) { } else { int newSize = 0; for(int i = 0; i != n; i++) { - if (inSet.contains(vector[i])) { + if (inSet.contains(vector[i].getHiveDecimal())) { sel[newSize++] = i; } } @@ -111,7 +112,7 @@ public void evaluate(VectorizedRowBatch batch) { //All must be selected otherwise size would be zero //Repeating property will not change. if (!nullPos[0]) { - if (!inSet.contains(vector[0])) { + if (!inSet.contains(vector[0].getHiveDecimal())) { //Entire batch is filtered out. batch.size = 0; @@ -124,7 +125,7 @@ public void evaluate(VectorizedRowBatch batch) { for(int j = 0; j != n; j++) { int i = sel[j]; if (!nullPos[i]) { - if (inSet.contains(vector[i])) { + if (inSet.contains(vector[i].getHiveDecimal())) { sel[newSize++] = i; } } @@ -136,7 +137,7 @@ public void evaluate(VectorizedRowBatch batch) { int newSize = 0; for(int i = 0; i != n; i++) { if (!nullPos[i]) { - if (inSet.contains(vector[i])) { + if (inSet.contains(vector[i].getHiveDecimal())) { sel[newSize++] = i; } } @@ -167,11 +168,7 @@ public Descriptor getDescriptor() { return null; } - public Decimal128[] getInListValues() { - return this.inListValues; - } - - public void setInListValues(Decimal128[] a) { + public void setInListValues(HiveDecimal[] a) { this.inListValues = a; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java index eeac8f1..4691fe1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -117,18 +116,6 @@ public int getOutputColumn() { return outputColumn; } - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - public int getInputColumn() { - return inputColumn; - } - - public void setInputColumn(int inputColumn) { - this.inputColumn = inputColumn; - } - @Override public String getOutputType() { return "long"; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java index 92cedf0..0120c0a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java 
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java index 6b1a0cf..b73e851 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java index 4c6c44d..421a737 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java @@ -21,9 +21,9 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import java.util.Arrays; @@ -61,7 +61,7 @@ public void evaluate(VectorizedRowBatch batch) { boolean[] outputIsNull = outputColVector.isNull; outputColVector.noNulls = inputColVector.noNulls; int n = batch.size; - Decimal128[] vector = inputColVector.vector; + HiveDecimalWritable[] vector = inputColVector.vector; // return immediately if batch is empty if (n == 0) { @@ -119,27 +119,6 @@ public int getOutputColumn() { public String getOutputType() { return outputType; } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - public int getDecimalPlaces() { - return decimalPlaces; - } - - public void setDecimalPlaces(int decimalPlaces) { - this.decimalPlaces = decimalPlaces; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java index 81a8d1d..40132af 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; public interface IDecimalInExpr { - void setInListValues(Decimal128[] inVals); + void setInListValues(HiveDecimal[] inVals); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java index 5cc8218..773b40a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java @@ 
-300,18 +300,18 @@ public static void setNullDataEntriesDecimal( if (v.noNulls) { return; } else if (v.isRepeating && v.isNull[0]) { - v.vector[0].setNullDataValue(); + v.setNullDataValue(0); } else if (selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; if(v.isNull[i]) { - v.vector[i].setNullDataValue(); + v.setNullDataValue(i); } } } else { for (int i = 0; i != n; i++) { if(v.isNull[i]) { - v.vector[i].setNullDataValue(); + v.setNullDataValue(i); } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java index be5cea8..d91b880 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java @@ -18,9 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Writable; @@ -34,7 +35,8 @@ Object writeValue(long value) throws HiveException; Object writeValue(double value) throws HiveException; Object writeValue(byte[] value, int start, int length) throws HiveException; - Object writeValue(Decimal128 value) throws HiveException; + Object writeValue(HiveDecimalWritable value) throws HiveException; + Object writeValue(HiveDecimal value) throws HiveException; Object setValue(Object row, ColumnVector column, int columnRow) throws HiveException; Object initValue(Object ost) throws HiveException; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index 85f3e24..94a47e0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.math.BigDecimal; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; @@ -27,7 +26,6 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -134,14 +132,29 @@ public Object setValue(Object field, byte[] value, int start, int length) throws * The base implementation must be overridden by the Decimal specialization */ @Override - public Object writeValue(Decimal128 value) throws HiveException { + public Object writeValue(HiveDecimal value) throws HiveException { throw new HiveException("Internal error: should not reach here"); } /** * The base implementation must be overridden by the Decimal specialization */ - public Object setValue(Object field, Decimal128 value) throws HiveException { + @Override + public Object writeValue(HiveDecimalWritable value) throws HiveException { + throw new HiveException("Internal error: should not reach here"); + } + + /** 
+ * The base implementation must be overridden by the Decimal specialization + */ + public Object setValue(Object field, HiveDecimalWritable value) throws HiveException { + throw new HiveException("Internal error: should not reach here"); + } + + /** + * The base implementation must be overridden by the Decimal specialization + */ + public Object setValue(Object field, HiveDecimal value) throws HiveException { throw new HiveException("Internal error: should not reach here"); } } @@ -465,24 +478,35 @@ public VectorExpressionWriter init(SettableHiveDecimalObjectInspector objInspect } @Override - public Object writeValue(Decimal128 value) throws HiveException { - return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, - HiveDecimal.create(value.toBigDecimal())); + public Object writeValue(HiveDecimalWritable value) throws HiveException { + return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value); + } + + @Override + public Object writeValue(HiveDecimal value) throws HiveException { + return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value); + } + + @Override + public Object setValue(Object field, HiveDecimalWritable value) { + if (null == field) { + field = initValue(null); + } + return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value); } @Override - public Object setValue(Object field, Decimal128 value) { + public Object setValue(Object field, HiveDecimal value) { if (null == field) { field = initValue(null); } - return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, - HiveDecimal.create(value.toBigDecimal())); + return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value); } @Override public Object initValue(Object ignored) { return ((SettableHiveDecimalObjectInspector) this.objectInspector).create( - HiveDecimal.create(BigDecimal.ZERO)); + HiveDecimal.ZERO); } }.init(fieldObjInspector); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java index 658e33c..9cc0621 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java @@ -21,9 +21,9 @@ import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow; @@ -41,7 +41,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; -import org.apache.hive.common.util.Decimal128FastBuffer; /** * Generated from template VectorUDAFAvg.txt. 
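The hunk below replaces the Decimal128 accumulator with one backed by HiveDecimalWritable and adds an isOutOfRange latch: once a group's running sum overflows, later values are ignored and the group's aggregate stays NULL. A compact standalone sketch of that accumulator contract, assuming only the two Hive decimal classes; the wrapper class name is hypothetical:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

class AvgAccumulatorSketch {
  private final HiveDecimalWritable sum = new HiveDecimalWritable();
  private long count;
  private boolean isNull = true;   // no value seen for this group yet
  private boolean isOutOfRange;    // latched on the first overflow

  void add(HiveDecimal value) {
    if (isOutOfRange) {
      return;                      // group is poisoned; its result stays NULL
    }
    if (isNull) {
      sum.set(value);              // first value initializes the running sum
      count = 1;
      isNull = false;
      return;
    }
    try {
      sum.set(sum.getHiveDecimal().add(value));
      count++;
    } catch (ArithmeticException e) {
      isOutOfRange = true;         // overflow: the group's AVG/SUM becomes NULL
    }
  }
}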
@@ -57,24 +56,45 @@ private static final long serialVersionUID = 1L; - transient private final Decimal128 sum = new Decimal128(); + transient private final HiveDecimalWritable sum = new HiveDecimalWritable(); transient private long count; transient private boolean isNull; - public void sumValueWithCheck(Decimal128 value, short scale) { + // We use this to catch overflow. + transient private boolean isOutOfRange; + + public void sumValueWithNullCheck(HiveDecimalWritable writable, short scale) { + if (isOutOfRange) { + return; + } + HiveDecimal value = writable.getHiveDecimal(); if (isNull) { - sum.update(value); - sum.changeScaleDestructive(scale); + sum.set(value); count = 1; isNull = false; } else { - sum.addDestructive(value, scale); + HiveDecimal result; + try { + result = sum.getHiveDecimal().add(value); + } catch (ArithmeticException e) { // catch on overflow + isOutOfRange = true; + return; + } + sum.set(result); count++; } } - public void sumValueNoCheck(Decimal128 value, short scale) { - sum.addDestructive(value, scale); + public void sumValueNoNullCheck(HiveDecimalWritable writable, short scale) { + HiveDecimal value = writable.getHiveDecimal(); + HiveDecimal result; + try { + result = sum.getHiveDecimal().add(value); + } catch (ArithmeticException e) { // catch on overflow + isOutOfRange = true; + return; + } + sum.set(result); count++; } @@ -87,7 +107,8 @@ public int getVariableSize() { @Override public void reset() { isNull = true; - sum.zeroClear(); + isOutOfRange = false; + sum.set(HiveDecimal.ZERO); count = 0L; } } @@ -98,8 +119,6 @@ public void reset() { transient private HiveDecimalWritable resultSum; transient private StructObjectInspector soi; - transient private final Decimal128FastBuffer scratch; - /** * The scale of the SUM in the partial output */ @@ -120,12 +139,6 @@ public void reset() { */ private short inputPrecision; - /** - * A value used as scratch to avoid allocating at runtime. - * Needed by computations like vector[0] * batchSize - */ - transient private Decimal128 scratchDecimal = new Decimal128(); - public VectorUDAFAvgDecimal(VectorExpression inputExpression) { this(); this.inputExpression = inputExpression; @@ -138,7 +151,6 @@ public VectorUDAFAvgDecimal() { resultSum = new HiveDecimalWritable(); partialResult[0] = resultCount; partialResult[1] = resultSum; - scratch = new Decimal128FastBuffer(); } @@ -185,7 +197,7 @@ public void aggregateInputSelection( DecimalColumnVector inputVector = ( DecimalColumnVector)batch. 
cols[this.inputExpression.getOutputColumn()]; - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.noNulls) { if (inputVector.isRepeating) { @@ -231,7 +243,7 @@ public void aggregateInputSelection( private void iterateNoNullsRepeatingWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128 value, + HiveDecimalWritable value, int batchSize) { for (int i=0; i < batchSize; ++i) { @@ -239,14 +251,14 @@ private void iterateNoNullsRepeatingWithAggregationSelection( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(value, this.sumScale); + myagg.sumValueWithNullCheck(value, this.sumScale); } } private void iterateNoNullsSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128[] values, + HiveDecimalWritable[] values, int[] selection, int batchSize) { @@ -255,28 +267,28 @@ private void iterateNoNullsSelectionWithAggregationSelection( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(values[selection[i]], this.sumScale); + myagg.sumValueWithNullCheck(values[selection[i]], this.sumScale); } } private void iterateNoNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128[] values, + HiveDecimalWritable[] values, int batchSize) { for (int i=0; i < batchSize; ++i) { Aggregation myagg = getCurrentAggregationBuffer( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(values[i], this.sumScale); + myagg.sumValueWithNullCheck(values[i], this.sumScale); } } private void iterateHasNullsRepeatingSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128 value, + HiveDecimalWritable value, int batchSize, int[] selection, boolean[] isNull) { @@ -287,7 +299,7 @@ private void iterateHasNullsRepeatingSelectionWithAggregationSelection( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(value, this.sumScale); + myagg.sumValueWithNullCheck(value, this.sumScale); } } @@ -296,7 +308,7 @@ private void iterateHasNullsRepeatingSelectionWithAggregationSelection( private void iterateHasNullsRepeatingWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128 value, + HiveDecimalWritable value, int batchSize, boolean[] isNull) { @@ -306,7 +318,7 @@ private void iterateHasNullsRepeatingWithAggregationSelection( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(value, this.sumScale); + myagg.sumValueWithNullCheck(value, this.sumScale); } } } @@ -314,7 +326,7 @@ private void iterateHasNullsRepeatingWithAggregationSelection( private void iterateHasNullsSelectionWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128[] values, + HiveDecimalWritable[] values, int batchSize, int[] selection, boolean[] isNull) { @@ -326,7 +338,7 @@ private void iterateHasNullsSelectionWithAggregationSelection( aggregationBufferSets, bufferIndex, j); - myagg.sumValueWithCheck(values[i], this.sumScale); + myagg.sumValueWithNullCheck(values[i], this.sumScale); } } } @@ -334,7 +346,7 @@ private void iterateHasNullsSelectionWithAggregationSelection( private void iterateHasNullsWithAggregationSelection( VectorAggregationBufferRow[] aggregationBufferSets, int bufferIndex, - Decimal128[] values, + HiveDecimalWritable[] values, int batchSize, boolean[] isNull) { @@ -344,7 +356,7 @@ private void 
iterateHasNullsWithAggregationSelection( aggregationBufferSets, bufferIndex, i); - myagg.sumValueWithCheck(values[i], this.sumScale); + myagg.sumValueWithNullCheck(values[i], this.sumScale); } } } @@ -367,18 +379,31 @@ public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch) Aggregation myagg = (Aggregation)agg; - Decimal128[] vector = inputVector.vector; + HiveDecimalWritable[] vector = inputVector.vector; if (inputVector.isRepeating) { if (inputVector.noNulls) { if (myagg.isNull) { myagg.isNull = false; - myagg.sum.zeroClear(); + myagg.sum.set(HiveDecimal.ZERO); myagg.count = 0; } - scratchDecimal.update(batchSize); - scratchDecimal.multiplyDestructive(vector[0], vector[0].getScale()); - myagg.sum.update(scratchDecimal); + HiveDecimal value = vector[0].getHiveDecimal(); + HiveDecimal multiple; + try { + multiple = value.multiply(HiveDecimal.create(batchSize)); + } catch (ArithmeticException e) { // catch on overflow + myagg.isOutOfRange = true; + return; + } + HiveDecimal result; + try { + result = myagg.sum.getHiveDecimal().add(multiple); + } catch (ArithmeticException e) { // catch on overflow + myagg.isOutOfRange = true; + return; + } + myagg.sum.set(result); myagg.count += batchSize; } return; @@ -400,7 +425,7 @@ else if (inputVector.noNulls){ private void iterateSelectionHasNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, int batchSize, boolean[] isNull, int[] selected) { @@ -408,57 +433,57 @@ private void iterateSelectionHasNulls( for (int j=0; j< batchSize; ++j) { int i = selected[j]; if (!isNull[i]) { - Decimal128 value = vector[i]; - myagg.sumValueWithCheck(value, this.sumScale); + HiveDecimalWritable value = vector[i]; + myagg.sumValueWithNullCheck(value, this.sumScale); } } } private void iterateSelectionNoNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, int batchSize, int[] selected) { if (myagg.isNull) { myagg.isNull = false; - myagg.sum.zeroClear(); + myagg.sum.set(HiveDecimal.ZERO); myagg.count = 0; } for (int i=0; i< batchSize; ++i) { - Decimal128 value = vector[selected[i]]; - myagg.sumValueNoCheck(value, this.sumScale); + HiveDecimalWritable value = vector[selected[i]]; + myagg.sumValueNoNullCheck(value, this.sumScale); } } private void iterateNoSelectionHasNulls( Aggregation myagg, - Decimal128[] vector, + HiveDecimalWritable[] vector, int batchSize, boolean[] isNull) { for(int i=0;i cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1; +SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1; + +SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION; +SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION; + +DROP TABLE DECIMAL_PRECISION_txt; +DROP TABLE DECIMAL_PRECISION; diff --git ql/src/test/queries/clientpositive/vector_decimal_trailing.q ql/src/test/queries/clientpositive/vector_decimal_trailing.q new file mode 100644 index 0000000..4639fee --- /dev/null +++ ql/src/test/queries/clientpositive/vector_decimal_trailing.q @@ -0,0 +1,30 @@ +SET hive.vectorized.execution.enabled=true; +set hive.fetch.task.conversion=minimal; + +DROP TABLE IF EXISTS DECIMAL_TRAILING_txt; +DROP TABLE IF EXISTS DECIMAL_TRAILING; + +CREATE TABLE DECIMAL_TRAILING_txt ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' +STORED AS TEXTFILE; + +LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_txt; + +CREATE TABLE 
+CREATE TABLE DECIMAL_TRAILING (
+  id int,
+  a decimal(10,4),
+  b decimal(15,8)
+  )
+STORED AS ORC;
+
+INSERT OVERWRITE TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt;
+
+SELECT * FROM DECIMAL_TRAILING ORDER BY id;
+
+DROP TABLE DECIMAL_TRAILING_txt;
+DROP TABLE DECIMAL_TRAILING;
diff --git ql/src/test/queries/clientpositive/vector_decimal_udf.q ql/src/test/queries/clientpositive/vector_decimal_udf.q
new file mode 100644
index 0000000..072abf2
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_decimal_udf.q
@@ -0,0 +1,142 @@
+SET hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=minimal;
+
+DROP TABLE IF EXISTS DECIMAL_UDF_txt;
+DROP TABLE IF EXISTS DECIMAL_UDF;
+
+CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int)
+ROW FORMAT DELIMITED
+  FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt;
+
+CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int)
+STORED AS ORC;
+
+INSERT OVERWRITE TABLE DECIMAL_UDF SELECT * FROM DECIMAL_UDF_txt;
+
+-- addition
+EXPLAIN SELECT key + key FROM DECIMAL_UDF;
+SELECT key + key FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key + value FROM DECIMAL_UDF;
+SELECT key + value FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF;
+SELECT key + (value/2) FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF;
+SELECT key + '1.0' FROM DECIMAL_UDF;
+
+-- subtraction
+EXPLAIN SELECT key - key FROM DECIMAL_UDF;
+SELECT key - key FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key - value FROM DECIMAL_UDF;
+SELECT key - value FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF;
+SELECT key - (value/2) FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF;
+SELECT key - '1.0' FROM DECIMAL_UDF;
+
+-- multiplication
+EXPLAIN SELECT key * key FROM DECIMAL_UDF;
+SELECT key * key FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0;
+SELECT key, value FROM DECIMAL_UDF where key * value > 0;
+
+EXPLAIN SELECT key * value FROM DECIMAL_UDF;
+SELECT key * value FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF;
+SELECT key * (value/2) FROM DECIMAL_UDF;
+
+EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF;
+SELECT key * '2.0' FROM DECIMAL_UDF;
+
+-- division
+EXPLAIN SELECT key / 0 FROM DECIMAL_UDF limit 1;
+SELECT key / 0 FROM DECIMAL_UDF limit 1;
+
+EXPLAIN SELECT key / NULL FROM DECIMAL_UDF limit 1;
+SELECT key / NULL FROM DECIMAL_UDF limit 1;
+
+EXPLAIN SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0;
+SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0;
+
+EXPLAIN SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0;
+SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0;
+
+EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0;
+SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0;
+
+EXPLAIN SELECT 1 + (key / '2.0') FROM DECIMAL_UDF;
+SELECT 1 + (key / '2.0') FROM DECIMAL_UDF;
+
+-- abs
+EXPLAIN SELECT abs(key) FROM DECIMAL_UDF;
+SELECT abs(key) FROM DECIMAL_UDF;
+
+-- avg
+EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value;
+SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value;
+
+-- negative
+EXPLAIN SELECT -key FROM DECIMAL_UDF;
+SELECT -key FROM DECIMAL_UDF;
+
+-- positive
+EXPLAIN SELECT +key FROM DECIMAL_UDF;
+SELECT +key FROM DECIMAL_UDF;
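
A note on result types for the arithmetic above: with key declared decimal(20,10), Hive derives a fresh precision and scale per operator instead of reusing the input type. Roughly, addition and subtraction keep the widest scale and add one integer digit, while multiplication adds the digit counts plus a carry digit, everything capped at the decimal maximum of 38. The sketch below is illustrative only, not Hive source; the authoritative rules live in Hive's decimal type inference, which also rebalances the scale when the raw precision overflows 38:

    // Illustrative derivation of decimal result types; not Hive source code.
    final class DecimalTypeRules {
      static final int MAX_PRECISION = 38;

      // addition/subtraction: widest scale wins, plus one integer digit
      static String plus(int p1, int s1, int p2, int s2) {
        int scale = Math.max(s1, s2);
        int precision = Math.max(p1 - s1, p2 - s2) + scale + 1;
        return "decimal(" + Math.min(precision, MAX_PRECISION) + "," + scale + ")";
      }

      // multiplication: digit counts add, plus one carry digit
      static String times(int p1, int s1, int p2, int s2) {
        int scale = s1 + s2;
        int precision = p1 + p2 + 1;
        return "decimal(" + Math.min(precision, MAX_PRECISION) + "," + scale + ")";
      }

      public static void main(String[] args) {
        // key + key on decimal(20,10) -> decimal(21,10)
        System.out.println(plus(20, 10, 20, 10));
        // key * key -> raw decimal(41,20); this sketch caps it to decimal(38,20),
        // while real Hive additionally trims the scale when capping.
        System.out.println(times(20, 10, 20, 10));
      }
    }
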
+
+-- ceiling
+EXPLAIN SELECT CEIL(key) FROM DECIMAL_UDF;
+SELECT CEIL(key) FROM DECIMAL_UDF;
+
+-- floor
+EXPLAIN SELECT FLOOR(key) FROM DECIMAL_UDF;
+SELECT FLOOR(key) FROM DECIMAL_UDF;
+
+-- round
+EXPLAIN SELECT ROUND(key, 2) FROM DECIMAL_UDF;
+SELECT ROUND(key, 2) FROM DECIMAL_UDF;
+
+-- power
+EXPLAIN SELECT POWER(key, 2) FROM DECIMAL_UDF;
+SELECT POWER(key, 2) FROM DECIMAL_UDF;
+
+-- modulo
+EXPLAIN SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF;
+SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF;
+
+-- stddev, var
+EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value;
+SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value;
+
+-- stddev_samp, var_samp
+EXPLAIN SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value;
+SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value;
+
+-- histogram
+EXPLAIN SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF;
+SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF;
+
+-- min
+EXPLAIN SELECT MIN(key) FROM DECIMAL_UDF;
+SELECT MIN(key) FROM DECIMAL_UDF;
+
+-- max
+EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF;
+SELECT MAX(key) FROM DECIMAL_UDF;
+
+-- count
+EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF;
+SELECT COUNT(key) FROM DECIMAL_UDF;
+
+DROP TABLE IF EXISTS DECIMAL_UDF_txt;
+DROP TABLE IF EXISTS DECIMAL_UDF;
+
diff --git ql/src/test/queries/clientpositive/vector_decimal_udf2.q ql/src/test/queries/clientpositive/vector_decimal_udf2.q
new file mode 100644
index 0000000..8fb0e15
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_decimal_udf2.q
@@ -0,0 +1,40 @@
+SET hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=minimal;
+
+DROP TABLE IF EXISTS DECIMAL_UDF2_txt;
+DROP TABLE IF EXISTS DECIMAL_UDF2;
+
+CREATE TABLE DECIMAL_UDF2_txt (key decimal(20,10), value int)
+ROW FORMAT DELIMITED
+  FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt;
+
+CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int)
+STORED AS ORC;
+
+INSERT OVERWRITE TABLE DECIMAL_UDF2 SELECT * FROM DECIMAL_UDF2_txt;
+
+EXPLAIN
+SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10;
+
+SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10;
+
+EXPLAIN
+SELECT
+  exp(key), ln(key),
+  log(key), log(key, key), log(key, value), log(value, key),
+  log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10;
+
+SELECT
+  exp(key), ln(key),
+  log(key), log(key, key), log(key, value), log(value, key),
+  log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10;
+
+DROP TABLE IF EXISTS DECIMAL_UDF2_txt;
+DROP TABLE IF EXISTS DECIMAL_UDF2;
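
Everything from here down is regenerated golden output for the Tez driver. The decimal sum and avg figures in these files come from the rewritten aggregation templates near the top of this patch, whose accumulation step condenses to the pattern below. Names are illustrative, not the generated template's, and the overflow handling assumes, as the patch does, that out-of-range arithmetic surfaces as ArithmeticException:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    // Condensed sketch of the overflow-checked decimal sum accumulator.
    class DecimalSumAccumulator {
      private final HiveDecimalWritable sum = new HiveDecimalWritable();
      private boolean isNull = true;
      private boolean isOutOfRange = false;

      void sumValueWithNullCheck(HiveDecimalWritable input) {
        if (isOutOfRange) {
          return; // a single overflow poisons the whole aggregate
        }
        if (isNull) {
          sum.set(HiveDecimal.ZERO); // first non-null value starts the sum at zero
          isNull = false;
        }
        try {
          sum.set(sum.getHiveDecimal().add(input.getHiveDecimal()));
        } catch (ArithmeticException e) { // catch on overflow
          isOutOfRange = true;
        }
      }

      // Repeating-batch fast path: fold in value * batchSize at once.
      void sumRepeating(HiveDecimalWritable input, int batchSize) {
        if (isOutOfRange) {
          return;
        }
        if (isNull) {
          sum.set(HiveDecimal.ZERO);
          isNull = false;
        }
        try {
          HiveDecimal multiple = input.getHiveDecimal().multiply(HiveDecimal.create(batchSize));
          sum.set(sum.getHiveDecimal().add(multiple));
        } catch (ArithmeticException e) { // catch on overflow
          isOutOfRange = true;
        }
      }

      HiveDecimal getResult() {
        // NULL when no values were seen or the sum left the decimal range.
        return (isNull || isOutOfRange) ? null : sum.getHiveDecimal();
      }
    }
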
diff --git ql/src/test/results/clientpositive/tez/acid_vectorization_partition.q.out ql/src/test/results/clientpositive/tez/acid_vectorization_partition.q.out
new file mode 100644
index 0000000..3aa0e1a
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/acid_vectorization_partition.q.out
@@ -0,0 +1,60 @@
+PREHOOK: query: CREATE TABLE acid_vectorized_part(a INT, b STRING) partitioned by (ds string) CLUSTERED BY(a) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@acid_vectorized_part
+POSTHOOK: query: CREATE TABLE acid_vectorized_part(a INT, b STRING) partitioned by (ds string) CLUSTERED BY(a) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@acid_vectorized_part
+PREHOOK: query: insert into table acid_vectorized_part partition (ds = 'today') select cint, cstring1 from alltypesorc where cint is not null order by cint limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@acid_vectorized_part@ds=today
+POSTHOOK: query: insert into table acid_vectorized_part partition (ds = 'today') select cint, cstring1 from alltypesorc where cint is not null order by cint limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@acid_vectorized_part@ds=today
+POSTHOOK: Lineage: acid_vectorized_part PARTITION(ds=today).a SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: acid_vectorized_part PARTITION(ds=today).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+PREHOOK: query: insert into table acid_vectorized_part partition (ds = 'tomorrow') select cint, cstring1 from alltypesorc where cint is not null order by cint limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@acid_vectorized_part@ds=tomorrow
+POSTHOOK: query: insert into table acid_vectorized_part partition (ds = 'tomorrow') select cint, cstring1 from alltypesorc where cint is not null order by cint limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@acid_vectorized_part@ds=tomorrow
+POSTHOOK: Lineage: acid_vectorized_part PARTITION(ds=tomorrow).a SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: acid_vectorized_part PARTITION(ds=tomorrow).b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+PREHOOK: query: select * from acid_vectorized_part order by a, b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_vectorized_part
+PREHOOK: Input: default@acid_vectorized_part@ds=today
+PREHOOK: Input: default@acid_vectorized_part@ds=tomorrow
+#### A masked pattern was here ####
+POSTHOOK: query: select * from acid_vectorized_part order by a, b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_vectorized_part
+POSTHOOK: Input: default@acid_vectorized_part@ds=today
+POSTHOOK: Input: default@acid_vectorized_part@ds=tomorrow
+#### A masked pattern was here ####
+-1073279343 oj1YrV5Wa today
+-1073279343 oj1YrV5Wa tomorrow
+-1073051226 A34p7oRr2WvUJNf today
+-1073051226 A34p7oRr2WvUJNf tomorrow
+-1072910839 0iqrc5 today
+-1072910839 0iqrc5 tomorrow
+-1072081801 dPkN74F7 today
+-1072081801 dPkN74F7 tomorrow
+-1072076362 2uLyD28144vklju213J1mr tomorrow
+-1072076362 2uLyD28144vklju213J1mr today
+-1071480828 aw724t8c5558x2xneC624 today
+-1071480828 aw724t8c5558x2xneC624 tomorrow
+-1071363017 Anj0oF today
+-1071363017 Anj0oF tomorrow
+-1070883071 0ruyd6Y50JpdGRf6HqD tomorrow
+-1070883071 0ruyd6Y50JpdGRf6HqD today
+-1070551679 iUR3Q today
+-1070551679 iUR3Q tomorrow
+-1069736047 k17Am8uPHWk02cEf1jet tomorrow
+-1069736047 k17Am8uPHWk02cEf1jet today
diff --git ql/src/test/results/clientpositive/tez/acid_vectorization_project.q.out ql/src/test/results/clientpositive/tez/acid_vectorization_project.q.out
new file mode 100644
index 0000000..1bdacb9
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/acid_vectorization_project.q.out
@@ -0,0 +1,73 @@
+PREHOOK: query: CREATE TABLE acid_vectorized(a INT, b STRING, c float) CLUSTERED BY(a) INTO 2
BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@acid_vectorized +POSTHOOK: query: CREATE TABLE acid_vectorized(a INT, b STRING, c float) CLUSTERED BY(a) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@acid_vectorized +PREHOOK: query: insert into table acid_vectorized select cint, cstring1, cfloat from alltypesorc where cint is not null order by cint limit 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@acid_vectorized +POSTHOOK: query: insert into table acid_vectorized select cint, cstring1, cfloat from alltypesorc where cint is not null order by cint limit 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@acid_vectorized +POSTHOOK: Lineage: acid_vectorized.a SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] +POSTHOOK: Lineage: acid_vectorized.b SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] +POSTHOOK: Lineage: acid_vectorized.c SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] +PREHOOK: query: select a,b from acid_vectorized order by a +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +POSTHOOK: query: select a,b from acid_vectorized order by a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +-1073279343 oj1YrV5Wa +-1073051226 A34p7oRr2WvUJNf +-1072910839 0iqrc5 +-1072081801 dPkN74F7 +-1072076362 2uLyD28144vklju213J1mr +-1071480828 aw724t8c5558x2xneC624 +-1071363017 Anj0oF +-1070883071 0ruyd6Y50JpdGRf6HqD +-1070551679 iUR3Q +-1069736047 k17Am8uPHWk02cEf1jet +PREHOOK: query: select a,c from acid_vectorized order by a +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +POSTHOOK: query: select a,c from acid_vectorized order by a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +-1073279343 11.0 +-1073051226 NULL +-1072910839 11.0 +-1072081801 NULL +-1072076362 NULL +-1071480828 -51.0 +-1071363017 8.0 +-1070883071 NULL +-1070551679 NULL +-1069736047 11.0 +PREHOOK: query: select b,c from acid_vectorized order by b +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +POSTHOOK: query: select b,c from acid_vectorized order by b +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_vectorized +#### A masked pattern was here #### +0iqrc5 11.0 +0ruyd6Y50JpdGRf6HqD NULL +2uLyD28144vklju213J1mr NULL +A34p7oRr2WvUJNf NULL +Anj0oF 8.0 +aw724t8c5558x2xneC624 -51.0 +dPkN74F7 NULL +iUR3Q NULL +k17Am8uPHWk02cEf1jet 11.0 +oj1YrV5Wa 11.0 diff --git ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out new file mode 100644 index 0000000..cb3e015 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out @@ -0,0 +1,173 @@ +PREHOOK: query: create table vectortab2k( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default 
+PREHOOK: Output: default@vectortab2k +POSTHOOK: query: create table vectortab2k( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vectortab2k +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vectortab2k +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vectortab2k +PREHOOK: query: create table vectortab2korc( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vectortab2korc +POSTHOOK: query: create table vectortab2korc( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vectortab2korc +PREHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k +PREHOOK: type: QUERY +PREHOOK: Input: default@vectortab2k +PREHOOK: Output: default@vectortab2korc +POSTHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vectortab2k +POSTHOOK: Output: default@vectortab2korc +POSTHOOK: Lineage: vectortab2korc.b SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:b, type:bigint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.bo SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:bo, type:boolean, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.d SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:d, type:double, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.dc SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dc, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: vectortab2korc.dt SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dt, type:date, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.f SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:f, type:float, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.i SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:i, type:int, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.s SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s, type:string, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.s2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s2, type:string, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.si SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:si, type:smallint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.t SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:t, type:tinyint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.ts SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts, type:timestamp, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.ts2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts2, type:timestamp, comment:null), ] +PREHOOK: query: explain +select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc 
+PREHOOK: type: QUERY +POSTHOOK: query: explain +select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vectortab2korc + Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dc (type: decimal(38,18)) + outputColumnNames: dc + Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: min(dc), max(dc), sum(dc), avg(dc) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: struct) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: min(VALUE._col0), max(VALUE._col1), sum(VALUE._col2), avg(VALUE._col3) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +PREHOOK: type: QUERY +PREHOOK: Input: default@vectortab2korc +#### A masked pattern was here #### +POSTHOOK: query: select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vectortab2korc +#### A masked pattern was here #### +-4997414117561.546875 4994550248722.298828 -10252745435816.024410 -5399023399.587163986308583465 diff --git ql/src/test/results/clientpositive/tez/vector_between_in.q.out ql/src/test/results/clientpositive/tez/vector_between_in.q.out index a7037f7..ad8c4c0 100644 --- ql/src/test/results/clientpositive/tez/vector_between_in.q.out +++ ql/src/test/results/clientpositive/tez/vector_between_in.q.out @@ -718,15 +718,15 @@ POSTHOOK: Input: default@decimal_date_test 14.9324324324 19.1135135135 20.3081081081 -22.1000000000 +22.1 24.4891891892 33.4486486486 34.6432432432 40.0189189189 42.4081081081 43.0054054054 -44.2000000000 -44.2000000000 +44.2 +44.2 44.7972972973 45.9918918919 PREHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 diff --git ql/src/test/results/clientpositive/tez/vector_coalesce.q.out ql/src/test/results/clientpositive/tez/vector_coalesce.q.out new file mode 100644 index 0000000..646df51 --- /dev/null +++ 
ql/src/test/results/clientpositive/tez/vector_coalesce.q.out @@ -0,0 +1,196 @@ +PREHOOK: query: EXPLAIN SELECT cdouble, cstring1, cint, cfloat, csmallint, coalesce(cdouble, cstring1, cint, cfloat, csmallint) +FROM alltypesorc +WHERE (cdouble IS NULL) LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT cdouble, cstring1, cint, cfloat, csmallint, coalesce(cdouble, cstring1, cint, cfloat, csmallint) +FROM alltypesorc +WHERE (cdouble IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: alltypesorc + Filter Operator + predicate: cdouble is null (type: boolean) + Select Operator + expressions: null (type: void), cstring1 (type: string), cint (type: int), cfloat (type: float), csmallint (type: smallint), COALESCE(null,cstring1,cint,cfloat,csmallint) (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT cdouble, cstring1, cint, cfloat, csmallint, coalesce(cdouble, cstring1, cint, cfloat, csmallint) +FROM alltypesorc +WHERE (cdouble IS NULL) LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: SELECT cdouble, cstring1, cint, cfloat, csmallint, coalesce(cdouble, cstring1, cint, cfloat, csmallint) +FROM alltypesorc +WHERE (cdouble IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +NULL LFgU5WT87C2yJ4W4YU0r8Pp -285355633 -51.0 NULL LFgU5WT87C2yJ4W4YU0r8Pp +NULL 75bFXC7TqGo1SEaYAx4C58m NULL -51.0 NULL 75bFXC7TqGo1SEaYAx4C58m +NULL v3p153e2bSkGS70v04G 354670578 -51.0 NULL v3p153e2bSkGS70v04G +NULL 0pOH7A4O8aQ37NuBqn 951003458 -51.0 NULL 0pOH7A4O8aQ37NuBqn +NULL 8ShAFcD734S8Q26WjMwpq0Q 164554497 -51.0 NULL 8ShAFcD734S8Q26WjMwpq0Q +NULL nOF31ehjY7ULCHMf 455419170 -51.0 NULL nOF31ehjY7ULCHMf +NULL t32s57Cjt4a250qQgVNAB5T -109813638 -51.0 NULL t32s57Cjt4a250qQgVNAB5T +NULL nvO822k30OaH37Il 665801232 -51.0 NULL nvO822k30OaH37Il +NULL M152O -601502867 -51.0 NULL M152O +NULL FgJ7Hft6845s1766oyt82q 199879534 -51.0 NULL FgJ7Hft6845s1766oyt82q +PREHOOK: query: EXPLAIN SELECT ctinyint, cdouble, cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) +FROM alltypesorc +WHERE (ctinyint IS NULL) LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT ctinyint, cdouble, cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) +FROM alltypesorc +WHERE (ctinyint IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: alltypesorc + Filter Operator + predicate: ctinyint is null (type: boolean) + Select Operator + expressions: ctinyint (type: tinyint), cdouble (type: double), cint (type: int), COALESCE((ctinyint + 10),(cdouble + log2(cint)),0) (type: double) + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT ctinyint, cdouble, cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) +FROM alltypesorc +WHERE (ctinyint IS NULL) LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: SELECT ctinyint, cdouble, cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) +FROM alltypesorc +WHERE (ctinyint IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here 
#### +NULL -4213.0 528534767 -4184.022576865738 +NULL -3012.0 528534767 -2983.0225768657383 +NULL -4016.0 528534767 -3987.0225768657383 +NULL -11534.0 528534767 -11505.022576865738 +NULL -6147.0 528534767 -6118.022576865738 +NULL -7680.0 528534767 -7651.022576865738 +NULL -7314.0 528534767 -7285.022576865738 +NULL 11254.0 528534767 11282.977423134262 +NULL 13889.0 528534767 13917.977423134262 +NULL 3321.0 528534767 3349.9774231342617 +PREHOOK: query: EXPLAIN SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) +FROM alltypesorc +WHERE (cfloat IS NULL AND cbigint IS NULL) LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) +FROM alltypesorc +WHERE (cfloat IS NULL AND cbigint IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: alltypesorc + Filter Operator + predicate: (cfloat is null and cbigint is null) (type: boolean) + Select Operator + expressions: null (type: void), null (type: void), COALESCE(null,null,0) (type: float) + outputColumnNames: _col0, _col1, _col2 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) +FROM alltypesorc +WHERE (cfloat IS NULL AND cbigint IS NULL) LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) +FROM alltypesorc +WHERE (cfloat IS NULL AND cbigint IS NULL) LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +NULL NULL 0 +PREHOOK: query: EXPLAIN SELECT ctimestamp1, ctimestamp2, coalesce(ctimestamp1, ctimestamp2) +FROM alltypesorc +WHERE ctimestamp1 IS NOT NULL OR ctimestamp2 IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT ctimestamp1, ctimestamp2, coalesce(ctimestamp1, ctimestamp2) +FROM alltypesorc +WHERE ctimestamp1 IS NOT NULL OR ctimestamp2 IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: alltypesorc + Filter Operator + predicate: (ctimestamp1 is not null or ctimestamp2 is not null) (type: boolean) + Select Operator + expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), COALESCE(ctimestamp1,ctimestamp2) (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT ctimestamp1, ctimestamp2, coalesce(ctimestamp1, ctimestamp2) +FROM alltypesorc +WHERE ctimestamp1 IS NOT NULL OR ctimestamp2 IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: SELECT ctimestamp1, ctimestamp2, coalesce(ctimestamp1, ctimestamp2) +FROM alltypesorc +WHERE ctimestamp1 IS NOT NULL OR ctimestamp2 IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +1969-12-31 15:59:46.674 1969-12-31 16:00:08.875 1969-12-31 15:59:46.674 +NULL 1969-12-31 16:00:13.589 1969-12-31 16:00:13.589 +1969-12-31 15:59:55.787 1969-12-31 16:00:01.546 1969-12-31 15:59:55.787 +1969-12-31 15:59:44.187 1969-12-31 16:00:06.961 1969-12-31 15:59:44.187 +1969-12-31 15:59:50.434 
1969-12-31 16:00:13.352 1969-12-31 15:59:50.434 +1969-12-31 16:00:15.007 1969-12-31 16:00:15.148 1969-12-31 16:00:15.007 +1969-12-31 16:00:07.021 1969-12-31 16:00:02.997 1969-12-31 16:00:07.021 +1969-12-31 16:00:04.963 1969-12-31 15:59:56.474 1969-12-31 16:00:04.963 +1969-12-31 15:59:52.176 1969-12-31 16:00:07.787 1969-12-31 15:59:52.176 +1969-12-31 15:59:44.569 1969-12-31 15:59:51.665 1969-12-31 15:59:44.569 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_1.q.out ql/src/test/results/clientpositive/tez/vector_decimal_1.q.out new file mode 100644 index 0000000..58c107a --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_1.q.out @@ -0,0 +1,591 @@ +PREHOOK: query: drop table if exists decimal_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists decimal_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_1 +PREHOOK: query: desc decimal_1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@decimal_1 +POSTHOOK: query: desc decimal_1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@decimal_1 +t decimal(4,2) +u decimal(5,0) +v decimal(10,0) +PREHOOK: query: insert overwrite table decimal_1 + select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: insert overwrite table decimal_1 + select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_1 +POSTHOOK: Lineage: decimal_1.t EXPRESSION [] +POSTHOOK: Lineage: decimal_1.u EXPRESSION [] +POSTHOOK: Lineage: decimal_1.v EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as tinyint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as tinyint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as tinyint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as smallint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as smallint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToShort(t) (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num 
rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as smallint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as smallint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as int) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as int) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToInteger(t) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as int) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as int) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as bigint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as bigint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToLong(t) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select 
Operator + expressions: KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as bigint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as bigint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as float) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as float) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToFloat(t) (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: float) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as float) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as float) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as double) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as double) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToDouble(t) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: 
NONE + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as double) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as double) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as string) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as string) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToString(t) (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as string) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as string) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as timestamp) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as timestamp) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: 
decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: CAST( t AS TIMESTAMP) (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as timestamp) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as timestamp) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +1969-12-31 16:00:17.29 +PREHOOK: query: drop table decimal_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_1 +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: drop table decimal_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_1 +POSTHOOK: Output: default@decimal_1 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out new file mode 100644 index 0000000..8974bb8 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out @@ -0,0 +1,112 @@ +PREHOOK: query: DROP TABLE IF EXISTS decimal_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS decimal_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS decimal +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS decimal +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE decimal_txt (dec decimal) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: CREATE TABLE decimal_txt (dec decimal) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: CREATE TABLE DECIMAL STORED AS ORC AS SELECT * FROM decimal_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL +POSTHOOK: query: CREATE TABLE DECIMAL STORED AS ORC AS SELECT * FROM decimal_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL +PREHOOK: query: EXPLAIN +SELECT 
dec FROM DECIMAL order by dec +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT dec FROM DECIMAL order by dec +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal + Statistics: Num rows: 1 Data size: 219 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dec (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 219 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 219 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 219 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 219 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT dec FROM DECIMAL order by dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec FROM DECIMAL order by dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal +#### A masked pattern was here #### +NULL +1000000000 +PREHOOK: query: DROP TABLE DECIMAL_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: DROP TABLE DECIMAL_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_txt +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: DROP TABLE DECIMAL +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal +PREHOOK: Output: default@decimal +POSTHOOK: query: DROP TABLE DECIMAL +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal +POSTHOOK: Output: default@decimal diff --git ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out new file mode 100644 index 0000000..af9459f --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out @@ -0,0 +1,1676 @@ +PREHOOK: query: drop table decimal_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table decimal_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table decimal_2 (t decimal(18,9)) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: create table decimal_2 (t decimal(18,9)) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_2 +PREHOOK: query: insert overwrite table decimal_2 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: insert overwrite table decimal_2 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_2 
+POSTHOOK: Lineage: decimal_2.t EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select 
cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToShort(t) (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToInteger(t) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToLong(t) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToFloat(t) (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: float) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: 
COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToDouble(t) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToString(t) (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 1 Data size: 112 
Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: insert overwrite table decimal_2 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: insert overwrite table decimal_2 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_2 +POSTHOOK: Lineage: decimal_2.t EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY 
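For context on the boolean results in this file: a decimal casts to boolean as "is non-zero", which is why both 17.29 earlier and 3404045.5044003 here come back as true. A minimal, self-contained sketch of that rule using plain java.math.BigDecimal — illustrative only, not the Hive code path itself:

    import java.math.BigDecimal;

    public class DecimalToBooleanSketch {
        public static void main(String[] args) {
            // Non-zero decimals cast to true; exactly zero casts to false.
            System.out.println(new BigDecimal("3404045.5044003").signum() != 0); // true
            System.out.println(new BigDecimal("0.000").signum() != 0);           // false
        }
    }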
+POSTHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +13 +PREHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToShort(t) (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### 
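The tinyint value 13 above and the smallint value -3827 below look surprising for an input of 3404045.5044003, but both are consistent with truncating the fraction and then applying Java-style two's-complement narrowing to the integral part. A sketch of the equivalent arithmetic (not the Hive implementation):

    public class DecimalNarrowingSketch {
        public static void main(String[] args) {
            long truncated = 3404045L;             // integral part of 3404045.5044003
            System.out.println((byte)  truncated); // 13    -> 3404045 % 256
            System.out.println((short) truncated); // -3827 -> low 16 bits, read as signed
        }
    }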
+POSTHOOK: query: select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +-3827 +PREHOOK: query: explain +select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToInteger(t) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3404045 +PREHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToLong(t) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + 
Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3404045 +PREHOOK: query: explain +select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToFloat(t) (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: float) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3404045.5 +PREHOOK: query: explain +select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToDouble(t) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE 
Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3404045.5044003 +PREHOOK: query: explain +select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToString(t) (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3404045.5044003 +PREHOOK: query: explain +select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3.14 AS decimal(4,2)) (type: decimal(4,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + 
Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(4,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3.14 +PREHOOK: query: explain +select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3.14 AS decimal(4,2)) (type: decimal(4,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(4,2)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(4,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3.14 +PREHOOK: query: explain +select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + 
Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 2012-12-19 11:12:19.1234567 AS decimal(30,8)) (type: decimal(30,8)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(30,8)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(30,8)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +1355944339.1234567 +PREHOOK: query: explain +select cast(true as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(true as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( true AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain +select cast(true as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(true as decimal) as c from 
decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( true AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(true as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(true as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +1 +PREHOOK: query: explain +select cast(3Y as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(3Y as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(3Y as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 
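The plans in this run of literal casts all constant-fold to CAST( 3 AS decimal(10,0)) because an unqualified decimal in Hive defaults to decimal(10,0), and the Y/S/L suffixes only set the width of the integer literal, mirroring Java's integral types. An illustrative sketch:

    public class LiteralSuffixSketch {
        public static void main(String[] args) {
            byte  y = 3;   // 3Y -> tinyint
            short s = 3;   // 3S -> smallint
            int   i = 3;   // 3  -> int
            long  l = 3L;  // 3L -> bigint
            // All four hold the same value 3, hence the identical plans and results.
            System.out.println(y + " " + s + " " + i + " " + l);
        }
    }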
+#### A masked pattern was here #### +POSTHOOK: query: select cast(3Y as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3 +PREHOOK: query: explain +select cast(3S as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(3S as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(3S as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(3S as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3 +PREHOOK: query: explain +select cast(cast(3 as int) as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(cast(3 as int) as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + 
input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(cast(3 as int) as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(cast(3 as int) as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3 +PREHOOK: query: explain +select cast(3L as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(3L as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(3L as decimal) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(3L as decimal) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +3 +PREHOOK: query: explain +select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( 1.0 AS decimal(20,19)) (type: decimal(20,19)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(20,19)) + sort 
order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(20,19)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +1.0 +PREHOOK: query: explain +select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( '0.99999999999999999999' AS decimal(20,20)) (type: decimal(20,20)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(20,20)) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(20,20)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +0.99999999999999999999 +PREHOOK: query: drop table decimal_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_2 +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: drop table decimal_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_2 +POSTHOOK: 
Output: default@decimal_2 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out new file mode 100644 index 0000000..9c8b02b --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out @@ -0,0 +1,374 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_3_txt +POSTHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_3_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_3_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_3_txt +PREHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_3_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_3 +POSTHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_3_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_3 +PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +NULL 0 +-1234567890.1234567890 -1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -1 +-1.12 -1 +-0.333 0 +-0.33 0 +-0.3 0 +0.000000000000000000 0 +0 0 +0 0 +0.01 0 +0.02 0 +0.1 0 +0.2 0 +0.3 0 +0.33 0 +0.333 0 +1 1 +1.0 1 +1.000000000000000000 1 +1.12 1 +1.122 1 +2 2 +2 2 +3.14 3 +3.14 3 +3.14 3 +3.140 4 +10 10 +20 20 +100 100 +124.00 124 +125.2 125 +200 200 +1234567890.1234567800 1234567890 +PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +1234567890.1234567800 1234567890 +200 200 +125.2 125 +124.00 124 +100 100 +20 20 +10 10 +3.140 4 +3.14 3 +3.14 3 +3.14 3 +2 2 +2 2 +1.122 1 +1.12 1 +1.000000000000000000 1 +1.0 1 +1 1 +0.333 0 +0.33 0 +0.3 0 +0.2 0 +0.1 0 +0.02 0 +0.01 0 +0 0 +0 0 +0.000000000000000000 0 +-0.3 0 +-0.33 0 +-0.333 0 +-1.12 -1 +-1.12 -1 +-1.122 -11 +-1255.49 -1255 +-4400 4400 +-1234567890.1234567890 -1234567890 +NULL 0 +PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked 
pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +NULL 0 +-1234567890.1234567890 -1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -1 +-1.12 -1 +-0.333 0 +-0.33 0 +-0.3 0 +0.000000000000000000 0 +0 0 +0 0 +0.01 0 +0.02 0 +0.1 0 +0.2 0 +0.3 0 +0.33 0 +0.333 0 +1 1 +1.0 1 +1.000000000000000000 1 +1.12 1 +1.122 1 +2 2 +2 2 +3.14 3 +3.14 3 +3.14 3 +3.140 4 +10 10 +20 20 +100 100 +124.00 124 +125.2 125 +200 200 +1234567890.1234567800 1234567890 +PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +NULL +-1234567890.1234567890 +-4400 +-1255.49 +-1.122 +-1.12 +-0.333 +-0.33 +-0.3 +0.000000000000000000 +0.01 +0.02 +0.1 +0.2 +0.3 +0.33 +0.333 +1 +1.12 +1.122 +2 +3.14 +10 +20 +100 +124.00 +125.2 +200 +1234567890.1234567800 +PREHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +NULL 0 +-1234567890.1234567890 -1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -2 +-0.333 0 +-0.33 0 +-0.3 0 +0.000000000000000000 0 +0.01 0 +0.02 0 +0.1 0 +0.2 0 +0.3 0 +0.33 0 +0.333 0 +1 3 +1.12 1 +1.122 1 +2 4 +3.14 13 +10 10 +20 20 +100 100 +124.00 124 +125.2 125 +200 200 +1234567890.1234567800 1234567890 +PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +-1234567890 -1234567890.1234567890 +-1255 -1255.49 +-11 -1.122 +-1 -2.24 +0 0.330000000000000000 +1 5.242000000000000000 +2 4 +3 9.42 +4 3.140 +10 10 +20 20 +100 100 +124 124.00 +125 125.2 +200 200 +4400 -4400 +1234567890 1234567890.1234567800 +PREHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +-1234567890.1234567890 -1234567890 -1234567890.1234567890 -1234567890 +-4400 4400 -4400 4400 +-1255.49 -1255 -1255.49 -1255 +-1.122 -11 -1.122 -11 +-1.12 -1 -1.12 -1 +-1.12 -1 -1.12 -1 +-1.12 -1 -1.12 -1 +-1.12 -1 -1.12 -1 +-0.333 0 -0.333 0 +-0.33 0 -0.33 0 +-0.3 0 -0.3 0 +0.000000000000000000 0 0.000000000000000000 0 +0 0 0.000000000000000000 0 +0 0 0.000000000000000000 0 +0.01 0 0.01 0 +0.02 0 0.02 0 +0.1 0 0.1 0 +0.2 0 0.2 0 +0.3 0 0.3 0 +0.33 0 0.33 0 +0.333 0 0.333 0 +1 1 1 1 +1.0 1 1 1 +1.000000000000000000 1 1 1 +1.12 1 1.12 1 +1.122 1 1.122 1 +2 2 2 2 +2 2 2 2 +2 2 2 2 +2 2 2 2 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 3.14 3 +3.14 3 
3.14 3 +3.140 4 3.14 3 +3.140 4 3.14 3 +3.140 4 3.14 3 +10 10 10 10 +20 20 20 20 +100 100 100 100 +124.00 124 124.00 124 +125.2 125 125.2 125 +200 200 200 200 +1234567890.1234567800 1234567890 1234567890.1234567800 1234567890 +PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.14 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.14 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +3.14 3 +3.14 3 +3.14 3 +3.140 4 +PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.140 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.140 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_3 +#### A masked pattern was here #### +3.14 3 +3.14 3 +3.14 3 +3.140 4 +PREHOOK: query: DROP TABLE DECIMAL_3_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_3_txt +PREHOOK: Output: default@decimal_3_txt +POSTHOOK: query: DROP TABLE DECIMAL_3_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_3_txt +POSTHOOK: Output: default@decimal_3_txt +PREHOOK: query: DROP TABLE DECIMAL_3 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_3 +PREHOOK: Output: default@decimal_3 +POSTHOOK: query: DROP TABLE DECIMAL_3 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_3 +POSTHOOK: Output: default@decimal_3 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out new file mode 100644 index 0000000..483ae1f --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out @@ -0,0 +1,250 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_4_1 +POSTHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_4_1 +PREHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25)) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_4_2 +POSTHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25)) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_4_2 +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_4_1 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_4_1 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_4_1 
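One value worth calling out in the DECIMAL_4_2 results below: 0.9999999999999999999999999 * 3 = 2.9999999999999999999999997 with no rounding, because exact decimal multiplication adds the operands' scales (25 + 0 here). The same arithmetic reproduced with plain BigDecimal:

    import java.math.BigDecimal;

    public class DecimalMultiplySketch {
        public static void main(String[] args) {
            BigDecimal key = new BigDecimal("0.9999999999999999999999999");
            // multiply() is exact: the result scale is 25 + 0, so all 25
            // fractional digits survive, matching the golden output.
            System.out.println(key.multiply(new BigDecimal("3")));
            // prints 2.9999999999999999999999997
        }
    }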
+PREHOOK: Output: default@decimal_4_2 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_4_1 +POSTHOOK: Output: default@decimal_4_2 +POSTHOOK: Lineage: decimal_4_2.key SIMPLE [(decimal_4_1)decimal_4_1.FieldSchema(name:key, type:decimal(35,25), comment:null), ] +POSTHOOK: Lineage: decimal_4_2.value EXPRESSION [(decimal_4_1)decimal_4_1.FieldSchema(name:key, type:decimal(35,25), comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_4_1 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_4_1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_4_1 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_4_1 +#### A masked pattern was here #### +NULL 0 +-1234567890.1234567890 -1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -1 +-1.12 -1 +-0.333 0 +-0.33 0 +-0.3 0 +0.0000000000000000000000000 0 +0 0 +0 0 +0.01 0 +0.02 0 +0.1 0 +0.2 0 +0.3 0 +0.33 0 +0.333 0 +0.9999999999999999999999999 1 +1 1 +1.0 1 +1.12 1 +1.122 1 +2 2 +2 2 +3.14 3 +3.14 3 +3.14 3 +3.140 4 +10 10 +20 20 +100 100 +124.00 124 +125.2 125 +200 200 +1234567890.1234567800 1234567890 +PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +NULL NULL +-1234567890.1234567890 -3703703670.3703703670 +-4400 -13200 +-1255.49 -3766.47 +-1.122 -3.366 +-1.12 -3.36 +-1.12 -3.36 +-0.333 -0.999 +-0.33 -0.99 +-0.3 -0.9 +0.0000000000000000000000000 0.0000000000000000000000000 +0 0 +0 0 +0.01 0.03 +0.02 0.06 +0.1 0.3 +0.2 0.6 +0.3 0.9 +0.33 0.99 +0.333 0.999 +0.9999999999999999999999999 2.9999999999999999999999997 +1 3 +1.0 3.0 +1.12 3.36 +1.122 3.366 +2 6 +2 6 +3.14 9.42 +3.14 9.42 +3.14 9.42 +3.140 9.420 +10 30 +20 60 +100 300 +124.00 372.00 +125.2 375.6 +200 600 +1234567890.1234567800 3703703670.3703703400 +PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +NULL NULL +-1234567890.1234567890 -3703703670.3703703670 +-4400 -13200 +-1255.49 -3766.47 +-1.122 -3.366 +-1.12 -3.36 +-1.12 -3.36 +-0.333 -0.999 +-0.33 -0.99 +-0.3 -0.9 +0.0000000000000000000000000 0.0000000000000000000000000 +0 0 +0 0 +0.01 0.03 +0.02 0.06 +0.1 0.3 +0.2 0.6 +0.3 0.9 +0.33 0.99 +0.333 0.999 +0.9999999999999999999999999 2.9999999999999999999999997 +1 3 +1.0 3.0 +1.12 3.36 +1.122 3.366 +2 6 +2 6 +3.14 9.42 +3.14 9.42 +3.14 9.42 +3.140 9.420 +10 30 +20 60 +100 300 +124.00 372.00 +125.2 375.6 +200 600 +1234567890.1234567800 3703703670.3703703400 +PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_4_2 +#### A masked pattern was here #### +NULL NULL +-1234567890.1234567890 -3703703670.3703703670 +-4400 -13200 +-1255.49 -3766.47 +-1.122 -3.366 +-1.12 -3.36 +-1.12 -3.36 +-0.333 -0.999 +-0.33 -0.99 +-0.3 -0.9 +0.0000000000000000000000000 0.0000000000000000000000000 +0 0 +0 0 
+0.01 0.03 +0.02 0.06 +0.1 0.3 +0.2 0.6 +0.3 0.9 +0.33 0.99 +0.333 0.999 +0.9999999999999999999999999 2.9999999999999999999999997 +1 3 +1.0 3.0 +1.12 3.36 +1.122 3.366 +2 6 +2 6 +3.14 9.42 +3.14 9.42 +3.14 9.42 +3.140 9.420 +10 30 +20 60 +100 300 +124.00 372.00 +125.2 375.6 +200 600 +1234567890.1234567800 3703703670.3703703400 +PREHOOK: query: DROP TABLE DECIMAL_4_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_4_1 +PREHOOK: Output: default@decimal_4_1 +POSTHOOK: query: DROP TABLE DECIMAL_4_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_4_1 +POSTHOOK: Output: default@decimal_4_1 +PREHOOK: query: DROP TABLE DECIMAL_4_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_4_2 +PREHOOK: Output: default@decimal_4_2 +POSTHOOK: query: DROP TABLE DECIMAL_4_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_4_2 +POSTHOOK: Output: default@decimal_4_2 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out new file mode 100644 index 0000000..01b5f42 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out @@ -0,0 +1,239 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_5_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_5_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_5 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_5 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_5_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_5_txt +POSTHOOK: query: CREATE TABLE DECIMAL_5_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_5_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_5_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_5_txt +PREHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_5 +POSTHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_5 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_5 SELECT * FROM DECIMAL_5_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_5_txt +PREHOOK: Output: default@decimal_5 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_5 SELECT * FROM DECIMAL_5_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_5_txt +POSTHOOK: Output: default@decimal_5 +POSTHOOK: Lineage: decimal_5.key SIMPLE [(decimal_5_txt)decimal_5_txt.FieldSchema(name:key, type:decimal(10,5), comment:null), ] +POSTHOOK: Lineage: decimal_5.value SIMPLE [(decimal_5_txt)decimal_5_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key 
+POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +NULL +NULL +NULL +-4400 +-1255.49 +-1.122 +-1.12 +-1.12 +-0.333 +-0.33 +-0.3 +0.00000 +0 +0 +0.01 +0.02 +0.1 +0.2 +0.3 +0.33 +0.333 +1 +1.0 +1.00000 +1.12 +1.122 +2 +2 +3.14 +3.14 +3.14 +3.140 +10 +20 +100 +124.00 +125.2 +200 +PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +NULL +-4400 +-1255.49 +-1.122 +-1.12 +-0.333 +-0.33 +-0.3 +0.00000 +0.01 +0.02 +0.1 +0.2 +0.3 +0.33 +0.333 +1 +1.12 +1.122 +2 +3.14 +10 +20 +100 +124.00 +125.2 +200 +PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +-4400 +NULL +0 +0 +100 +10 +1 +0 +0 +200 +20 +2 +0 +0 +0 +0 +0 +0 +0 +0 +0 +1 +2 +3 +-1 +-1 +-1 +1 +1 +124 +125 +-1255 +3 +3 +3 +1 +NULL +NULL +PREHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_5 +#### A masked pattern was here #### +NULL +NULL +0.000 +0 +100 +10 +1 +0.1 +0.01 +200 +20 +2 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +1.0 +2 +3.14 +-1.12 +-1.12 +-1.122 +1.12 +1.122 +124.00 +125.2 +NULL +3.14 +3.14 +3.140 +1.000 +NULL +NULL +PREHOOK: query: DROP TABLE DECIMAL_5_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_5_txt +PREHOOK: Output: default@decimal_5_txt +POSTHOOK: query: DROP TABLE DECIMAL_5_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_5_txt +POSTHOOK: Output: default@decimal_5_txt +PREHOOK: query: DROP TABLE DECIMAL_5 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_5 +PREHOOK: Output: default@decimal_5 +POSTHOOK: query: DROP TABLE DECIMAL_5 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_5 +POSTHOOK: Output: default@decimal_5 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out new file mode 100644 index 0000000..7ecd500 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out @@ -0,0 +1,303 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF 
EXISTS DECIMAL_6_3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_6_1_txt +POSTHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_6_1_txt +PREHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_6_2_txt +POSTHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_6_2_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_6_1_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_6_1_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_6_2_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_6_2_txt +PREHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_6_1 +POSTHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_6_1 +PREHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_6_2 +POSTHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_6_2 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1_txt +PREHOOK: Output: default@decimal_6_1 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1_txt +POSTHOOK: Output: default@decimal_6_1 +POSTHOOK: Lineage: decimal_6_1.key SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:key, type:decimal(10,5), comment:null), ] +POSTHOOK: Lineage: decimal_6_1.value SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_2_txt +PREHOOK: Output: default@decimal_6_2 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@decimal_6_2_txt +POSTHOOK: Output: default@decimal_6_2 +POSTHOOK: Lineage: decimal_6_2.key SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:key, type:decimal(17,4), comment:null), ] +POSTHOOK: Lineage: decimal_6_2.value SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +NULL -1234567890 +NULL 0 +NULL 3 +NULL 4 +NULL 1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -1 +-0.333 0 +-0.3 0 +0.00000 0 +0 0 +0.333 0 +1.0 1 +1.00000 1 +1.12 1 +1.122 1 +2 2 +3.14 3 +3.14 3 +3.140 4 +10 10 +10.73433 5 +124.00 124 +125.2 125 +23232.23435 2 +PREHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +NULL 0 +-1234567890.1235 -1234567890 +-4400 4400 +-1255.49 -1255 +-1.122 -11 +-1.12 -1 +-0.333 0 +-0.3 0 +0.0000 0 +0 0 +0.333 0 +1.0 1 +1.0000 1 +1.12 1 +1.122 1 +2 2 +3.14 3 +3.14 3 +3.140 4 +10 10 +10.7343 5 +124.00 124 +125.2 125 +23232.2344 2 +2389432.2375 3 +2389432.2375 4 +1234567890.1235 1234567890 +PREHOOK: query: SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +NULL +NULL +NULL +NULL +NULL +NULL +-1234567890.1235 +-4400 +-4400 +-1255.49 +-1255.49 +-1.122 +-1.122 +-1.12 +-1.12 +-0.333 +-0.333 +-0.3 +-0.3 +0.00000 +0.0000 +0 +0 +0.333 +0.333 +1.0 +1.0 +1.0000 +1.00000 +1.12 +1.12 +1.122 +1.122 +2 +2 +3.14 +3.14 +3.14 +3.14 +3.140 +3.140 +10 +10 +10.7343 +10.73433 +124.00 +124.00 +125.2 +125.2 +23232.23435 +23232.2344 +2389432.2375 +2389432.2375 +1234567890.1235 +PREHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_6_3 +POSTHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_6_3 +PREHOOK: query: desc DECIMAL_6_3 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@decimal_6_3 +POSTHOOK: query: desc DECIMAL_6_3 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@decimal_6_3 +k double +v int +PREHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@decimal_6_3 +#### A masked pattern was here #### +NULL -695344902 +NULL 0 +NULL 33 +NULL 44 +NULL 695344902 +-4394.5 48400 +-1249.99 -13805 +4.378 -121 +4.38 -11 +5.167 0 +5.2 0 +5.5 0 +5.5 0 +5.833 0 +6.5 11 +6.5 11 +6.62 11 +6.622 11 +7.5 22 +8.64 33 +8.64 33 +8.64 44 +15.5 110 +16.23433 55 +129.5 1364 +130.7 1375 +23237.73435 22 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out index f1b7b8b..e4b2c56 100644 --- ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out @@ -113,14 +113,14 @@ POSTHOOK: query: SELECT cint, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_vgby #### A masked pattern was here #### -NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 3072 11160.71538461538500 -5147.90769230769300 6010604.30769230735360 --3728 6 5831542.2692483780 -3367.6517567568 5817556.0411483778 6 6984454.21109769200000 -4033.445769230769 6967702.86724384584710 --563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 2 -617.56077692307690 -4033.445769230769 -4651.00654615384590 -762 2 5831542.2692483780 1531.2194054054 5833073.4886537834 2 6984454.21109769200000 1833.9456923076925 6986288.15678999969250 -6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 3 6984454.211097692 -617.56077692307690 6983219.08954384584620 -253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 1024 11697.96923076923100 -11712.99230769231000 -416182.64030769233089 -528534767 1024 5831542.2692483780 -9777.1594594595 11646372.8607481068 1024 6984454.21109769200000 -11710.13076923077100 13948892.79980307629003 -626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 1024 11645.74615384615400 -11712.27692307692300 12625.04759999997746 +NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 3072 11160.715384615385 -5147.907692307693 6010604.3076923073536 +-3728 6 5831542.269248378 -3367.6517567568 5817556.0411483778 6 6984454.211097692 -4033.445769230769 6967702.8672438458471 +-563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 2 -617.5607769230769 -4033.445769230769 -4651.0065461538459 +762 2 5831542.269248378 1531.2194054054 5833073.4886537834 2 6984454.211097692 1833.9456923076925 6986288.1567899996925 +6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 3 6984454.211097692 -617.5607769230769 6983219.0895438458462 +253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 1024 11697.969230769231 -11712.99230769231 -416182.64030769233089 +528534767 1024 5831542.269248378 -9777.1594594595 11646372.8607481068 1024 6984454.211097692 -11710.130769230771 13948892.79980307629003 +626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 1024 11645.746153846154 -11712.276923076923 12625.04759999997746 PREHOOK: query: -- Now add the others... 
EXPLAIN SELECT cint, COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1), @@ -217,11 +217,11 @@ POSTHOOK: query: SELECT cint, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_vgby #### A masked pattern was here #### -NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 1633.60810810806667 5695.483082135364 5696.4103077145055 3072 11160.71538461538500 -5147.90769230769300 6010604.30769230735360 1956.576923076922966667 6821.495748565159 6822.606289190924 --3728 6 5831542.2692483780 -3367.6517567568 5817556.0411483778 969592.67352472963333 2174330.2092403853 2381859.406131774 6 6984454.21109769200000 -4033.445769230769 6967702.86724384584710 1161283.811207307641183333 2604201.2704476737 2852759.5602156054 --563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 -1941.6364148649 1426.0153418918999 2016.6902366556308 2 -617.56077692307690 -4033.445769230769 -4651.00654615384590 -2325.50327307692295 1707.9424961538462 2415.395441814127 -762 2 5831542.2692483780 1531.2194054054 5833073.4886537834 2916536.7443268917 2915005.5249214866 4122440.3477364695 2 6984454.21109769200000 1833.9456923076925 6986288.15678999969250 3493144.07839499984625 3491310.1327026924 4937458.140118758 -6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 1943503.67570081066667 2749258.455012492 3367140.1929065133 3 6984454.211097692 -617.56077692307690 6983219.08954384584620 2327739.696514615282066667 3292794.4113115156 4032833.0678006653 -253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 -339.33992366976309 5708.9563478862 5711.745967572779 1024 11697.96923076923100 -11712.99230769231000 -416182.64030769233089 -406.428359675480791885 6837.632716002934 6840.973851172274 -528534767 1024 5831542.2692483780 -9777.1594594595 11646372.8607481068 11373.41099682432305 257528.92988206653 257654.7686043977 1024 6984454.21109769200000 -11710.13076923077100 13948892.79980307629003 13621.965624807691689482 308443.1074570801 308593.82484083984 -626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 10.29399661106318 5742.09145323734 5744.897264034267 1024 11645.74615384615400 -11712.27692307692300 12625.04759999997746 12.329148046874977988 6877.318722794877 6880.679250101603 +NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 1633.60810810806667 5695.483082135364 5696.4103077145055 3072 11160.715384615385 -5147.907692307693 6010604.3076923073536 1956.576923076922966667 6821.495748565159 6822.606289190924 +-3728 6 5831542.269248378 -3367.6517567568 5817556.0411483778 969592.67352472963333 2174330.2092403853 2381859.406131774 6 6984454.211097692 -4033.445769230769 6967702.8672438458471 1161283.811207307641183333 2604201.2704476737 2852759.5602156054 +-563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 -1941.6364148649 1426.0153418918999 2016.6902366556308 2 -617.5607769230769 -4033.445769230769 -4651.0065461538459 -2325.50327307692295 1707.9424961538462 2415.395441814127 +762 2 5831542.269248378 1531.2194054054 5833073.4886537834 2916536.7443268917 2915005.5249214866 4122440.3477364695 2 6984454.211097692 1833.9456923076925 6986288.1567899996925 3493144.07839499984625 3491310.1327026924 4937458.140118758 +6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 1943503.67570081066667 2749258.455012492 3367140.1929065133 3 6984454.211097692 -617.5607769230769 6983219.0895438458462 2327739.696514615282066667 3292794.4113115156 4032833.0678006653 +253665376 1024 9767.0054054054 -9779.5486486487 
-347484.0818378374 -339.33992366976309 5708.9563478862 5711.745967572779 1024 11697.969230769231 -11712.99230769231 -416182.64030769233089 -406.428359675480791885 6837.632716002934 6840.973851172274 +528534767 1024 5831542.269248378 -9777.1594594595 11646372.8607481068 11373.41099682432305 257528.92988206653 257654.7686043977 1024 6984454.211097692 -11710.130769230771 13948892.79980307629003 13621.965624807691689482 308443.1074570801 308593.82484083984 +626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 10.29399661106318 5742.09145323734 5744.897264034267 1024 11645.746153846154 -11712.276923076923 12625.04759999997746 12.329148046874977988 6877.318722794877 6880.679250101603 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out new file mode 100644 index 0000000..e2fe3a2 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out @@ -0,0 +1,41 @@ +PREHOOK: query: EXPLAIN SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: alltypesorc + Filter Operator + predicate: (((cdouble is not null and cint is not null) and cboolean1 is not null) and ctimestamp1 is not null) (type: boolean) + Select Operator + expressions: cdouble (type: double), cint (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), CAST( cdouble AS decimal(20,10)) (type: decimal(20,10)), CAST( cint AS decimal(23,14)) (type: decimal(23,14)), CAST( cboolean1 AS decimal(5,2)) (type: decimal(5,2)), CAST( ctimestamp1 AS decimal(15,0)) (type: decimal(15,0)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +-13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0 528534767 1 -13 +-15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0 528534767 1 -4 +-9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0 528534767 1 -16 +15007.0 528534767 true 
1969-12-31 15:59:50.434 15007.0 528534767 1 -10 +7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0 528534767 1 15 +4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0 528534767 1 7 +-7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0 528534767 1 5 +-15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0 528534767 1 -8 +-15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0 528534767 1 -15 +5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0 528534767 1 -16 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out new file mode 100644 index 0000000..7faa630 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out @@ -0,0 +1,51 @@ +PREHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_test +POSTHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_test +PREHOOK: query: EXPLAIN SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)), cdecimal1 % 10, CAST(cdecimal1 AS INT), CAST(cdecimal2 AS SMALLINT), CAST(cdecimal2 AS TINYINT), CAST(cdecimal1 AS BIGINT), CAST (cdecimal1 AS BOOLEAN), CAST(cdecimal2 AS DOUBLE), CAST(cdecimal1 AS FLOAT), CAST(cdecimal2 AS STRING), CAST(cdecimal1 AS TIMESTAMP) FROM decimal_test WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)), cdecimal1 % 10, CAST(cdecimal1 AS INT), CAST(cdecimal2 AS SMALLINT), CAST(cdecimal2 AS TINYINT), CAST(cdecimal1 AS BIGINT), CAST (cdecimal1 AS BOOLEAN), CAST(cdecimal2 AS DOUBLE), CAST(cdecimal1 AS FLOAT), CAST(cdecimal2 AS STRING), CAST(cdecimal1 AS TIMESTAMP) FROM decimal_test WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 10 + Processor Tree: + TableScan + alias: decimal_test + Filter Operator + predicate: (((((cdecimal1 > 0) and (cdecimal1 < 12345.5678)) and (cdecimal2 <> 0)) and (cdecimal2 > 1000)) and cdouble is not null) (type: boolean) + Select Operator + expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((cdecimal1 + 2.34) / cdecimal2) (type: double), (cdecimal1 * (cdecimal2 / 3.4)) (type: double), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( 
cdecimal1 AS TIMESTAMP) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 + Limit + Number of rows: 10 + ListSink + +PREHOOK: query: SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)), cdecimal1 % 10, CAST(cdecimal1 AS INT), CAST(cdecimal2 AS SMALLINT), CAST(cdecimal2 AS TINYINT), CAST(cdecimal1 AS BIGINT), CAST (cdecimal1 AS BOOLEAN), CAST(cdecimal2 AS DOUBLE), CAST(cdecimal1 AS FLOAT), CAST(cdecimal2 AS STRING), CAST(cdecimal1 AS TIMESTAMP) FROM decimal_test WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_test +#### A masked pattern was here #### +POSTHOOK: query: SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)), cdecimal1 % 10, CAST(cdecimal1 AS INT), CAST(cdecimal2 AS SMALLINT), CAST(cdecimal2 AS TINYINT), CAST(cdecimal1 AS BIGINT), CAST (cdecimal1 AS BOOLEAN), CAST(cdecimal2 AS DOUBLE), CAST(cdecimal1 AS FLOAT), CAST(cdecimal2 AS STRING), CAST(cdecimal1 AS TIMESTAMP) FROM decimal_test WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_test +#### A masked pattern was here #### +19699.417463617423 -12507.913305613346 0.8351496686995997 2.8303425077026896E7 3.6405405405 8963 10735 -17 8963 true 10735.776923076923 8963.641 10735.776923076923 1969-12-31 18:29:23.64054054 +9216.339708939685 -5851.806444906470 0.8353975893550668 6195112.1797296945 3.6243243243 4193 5022 -98 4193 true 5022.715384615385 4193.6245 5022.715384615385 1969-12-31 17:09:53.624324324 +6514.8403326403464 -4136.5212058211928 0.8355907765708067 3095563.9418919063 4.3864864865 2964 3550 -34 2964 true 3550.4538461538464 2964.3865 3550.4538461538464 1969-12-31 16:49:24.386486486 +7587.301455301477 -4817.467775467754 0.8354976172734904 4198623.24324327 2.3783783784 3452 4134 38 3452 true 4134.923076923077 3452.3784 4134.923076923077 1969-12-31 16:57:32.378378378 +19197.9729729730 -12189.5270270270 0.835155361813429 2.6880848817567654E7 5.4729729730 8735 10462 -34 8735 true 10462.5 8735.473 10462.5 1969-12-31 18:25:35.472972973 +17098.9945945946 -10856.8054054054 0.8351828165813104 2.132423090270272E7 0.3945945946 7780 9318 102 7780 true 9318.6 7780.3945 9318.6 1969-12-31 18:09:40.394594594 +12433.723076923077 -7894.646153846154 0.8352770361086894 1.12754688E7 7.6 5657 6776 120 5657 true 6776.123076923077 5657.6 6776.123076923077 1969-12-31 17:34:17.6 +7247.316839916862 -4601.598544698524 0.8355241651897876 3830775.6932432684 7.6783783784 3297 3949 109 3297 true 3949.638461538462 3297.6785 3949.638461538462 1969-12-31 16:54:57.678378378 +14757.1700623700465 -9369.8914760914930 0.8352226654922171 1.5883214124324286E7 4.8162162162 6714 8042 106 6714 true 8042.3538461538465 6714.8164 8042.3538461538465 1969-12-31 17:51:54.816216216 +10964.832016631993 -6961.991060291086 0.8353232978714221 8768719.779729689 9.2243243243 4989 5975 87 4989 true 5975.607692307693 4989.224 5975.607692307693 1969-12-31 17:23:09.224324324 diff --git ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out new file mode 100644 index 0000000..6a5ccc6 --- /dev/null +++ 
ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out @@ -0,0 +1,211 @@ +PREHOOK: query: -- SORT_QUERY_RESULTS + +CREATE TABLE decimal_mapjoin STORED AS ORC AS + SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, + CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2, + cint + FROM alltypesorc +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_mapjoin +POSTHOOK: query: -- SORT_QUERY_RESULTS + +CREATE TABLE decimal_mapjoin STORED AS ORC AS + SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, + CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2, + cint + FROM alltypesorc +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_mapjoin +PREHOOK: query: EXPLAIN SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2 + FROM decimal_mapjoin l + JOIN decimal_mapjoin r ON l.cint = r.cint + WHERE l.cint = 6981 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2 + FROM decimal_mapjoin l + JOIN decimal_mapjoin r ON l.cint = r.cint + WHERE l.cint = 6981 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: r + Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (cint = 6981) (type: boolean) + Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: 6981 (type: int) + sort order: + + Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + value expressions: cdecimal2 (type: decimal(23,14)) + Execution mode: vectorized + Map 2 + Map Operator Tree: + TableScan + alias: l + Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (cint = 6981) (type: boolean) + Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {cdecimal1} + 1 {cdecimal2} + keys: + 0 6981 (type: int) + 1 6981 (type: int) + outputColumnNames: _col1, _col9 + input vertices: + 1 Map 1 + Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 6981 (type: int), 6981 (type: int), _col1 (type: decimal(20,10)), _col9 (type: decimal(23,14)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2 + FROM decimal_mapjoin l + JOIN decimal_mapjoin r ON l.cint = r.cint + WHERE l.cint = 6981 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@decimal_mapjoin +#### A masked pattern was here #### +POSTHOOK: query: SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2 + FROM decimal_mapjoin l + JOIN decimal_mapjoin r ON l.cint = r.cint + WHERE l.cint = 6981 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_mapjoin +#### A masked pattern was here #### +6981 6981 -515.6210729730 -617.5607769230769 +6981 6981 -515.6210729730 -617.5607769230769 +6981 6981 -515.6210729730 -617.5607769230769 +6981 6981 -515.6210729730 -617.5607769230769 +6981 6981 -515.6210729730 6984454.211097692 +6981 6981 -515.6210729730 6984454.211097692 +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 -515.6210729730 NULL +6981 6981 5831542.269248378 -617.5607769230769 +6981 6981 5831542.269248378 -617.5607769230769 +6981 6981 5831542.269248378 6984454.211097692 +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 5831542.269248378 NULL +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL -617.5607769230769 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL 6984454.211097692 +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL +6981 6981 NULL NULL diff --git ql/src/test/results/clientpositive/tez/vector_decimal_math_funcs.q.out ql/src/test/results/clientpositive/tez/vector_decimal_math_funcs.q.out new file mode 100644 index 0000000..9e09f71 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_math_funcs.q.out @@ -0,0 +1,192 @@ +PREHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT 
cbigint, cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_test +POSTHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT cbigint, cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_test +PREHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end. + +explain +select + cdecimal1 + ,Round(cdecimal1, 2) + ,Round(cdecimal1) + ,Floor(cdecimal1) + ,Ceil(cdecimal1) + ,round(Exp(cdecimal1), 58) + ,Ln(cdecimal1) + ,Log10(cdecimal1) + -- Use log2 as a representative function to test all input types. + ,Log2(cdecimal1) + -- Use 15601.0 to test zero handling, as there are no zeroes in the table + ,Log2(cdecimal1 - 15601.0) + ,Log(2.0, cdecimal1) + ,Pow(log2(cdecimal1), 2.0) + ,Power(log2(cdecimal1), 2.0) + ,Sqrt(cdecimal1) + ,Abs(cdecimal1) + ,Sin(cdecimal1) + ,Asin(cdecimal1) + ,Cos(cdecimal1) + ,ACos(cdecimal1) + ,Atan(cdecimal1) + ,Degrees(cdecimal1) + ,Radians(cdecimal1) + ,Positive(cdecimal1) + ,Negative(cdecimal1) + ,Sign(cdecimal1) + -- Test nesting + ,cos(-sin(log(cdecimal1)) + 3.14159) +from decimal_test +-- limit output to a reasonably small number of rows +where cbigint % 500 = 0 +-- test use of a math function in the WHERE clause +and sin(cdecimal1) >= -1.0 +PREHOOK: type: QUERY +POSTHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end. + +explain +select + cdecimal1 + ,Round(cdecimal1, 2) + ,Round(cdecimal1) + ,Floor(cdecimal1) + ,Ceil(cdecimal1) + ,round(Exp(cdecimal1), 58) + ,Ln(cdecimal1) + ,Log10(cdecimal1) + -- Use log2 as a representative function to test all input types. 
+ ,Log2(cdecimal1) + -- Use 15601.0 to test zero handling, as there are no zeroes in the table + ,Log2(cdecimal1 - 15601.0) + ,Log(2.0, cdecimal1) + ,Pow(log2(cdecimal1), 2.0) + ,Power(log2(cdecimal1), 2.0) + ,Sqrt(cdecimal1) + ,Abs(cdecimal1) + ,Sin(cdecimal1) + ,Asin(cdecimal1) + ,Cos(cdecimal1) + ,ACos(cdecimal1) + ,Atan(cdecimal1) + ,Degrees(cdecimal1) + ,Radians(cdecimal1) + ,Positive(cdecimal1) + ,Negative(cdecimal1) + ,Sign(cdecimal1) + -- Test nesting + ,cos(-sin(log(cdecimal1)) + 3.14159) +from decimal_test +-- limit output to a reasonably small number of rows +where cbigint % 500 = 0 +-- test use of a math function in the WHERE clause +and sin(cdecimal1) >= -1.0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: decimal_test + Filter Operator + predicate: (((cbigint % 500) = 0) and (sin(cdecimal1) >= -1.0)) (type: boolean) + Select Operator + expressions: cdecimal1 (type: decimal(20,10)), round(cdecimal1, 2) (type: decimal(13,2)), round(cdecimal1) (type: decimal(11,0)), floor(cdecimal1) (type: decimal(11,0)), ceil(cdecimal1) (type: decimal(11,0)), round(exp(cdecimal1), 58) (type: double), ln(cdecimal1) (type: double), log10(cdecimal1) (type: double), log2(cdecimal1) (type: double), log2((cdecimal1 - 15601.0)) (type: double), log(2.0, cdecimal1) (type: double), power(log2(cdecimal1), 2.0) (type: double), power(log2(cdecimal1), 2.0) (type: double), sqrt(cdecimal1) (type: double), abs(cdecimal1) (type: decimal(38,18)), sin(cdecimal1) (type: double), asin(cdecimal1) (type: double), cos(cdecimal1) (type: double), acos(cdecimal1) (type: double), atan(cdecimal1) (type: double), degrees(cdecimal1) (type: double), radians(cdecimal1) (type: double), cdecimal1 (type: decimal(20,10)), (- cdecimal1) (type: decimal(20,10)), sign(cdecimal1) (type: int), cos(((- sin(log(cdecimal1))) + 3.14159)) (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25 + ListSink + +PREHOOK: query: select + cdecimal1 + ,Round(cdecimal1, 2) + ,Round(cdecimal1) + ,Floor(cdecimal1) + ,Ceil(cdecimal1) + ,round(Exp(cdecimal1), 58) + ,Ln(cdecimal1) + ,Log10(cdecimal1) + -- Use log2 as a representative function to test all input types. + ,Log2(cdecimal1) + -- Use 15601.0 to test zero handling, as there are no zeroes in the table + ,Log2(cdecimal1 - 15601.0) + ,Log(2.0, cdecimal1) + ,Pow(log2(cdecimal1), 2.0) + ,Power(log2(cdecimal1), 2.0) + ,Sqrt(cdecimal1) + ,Abs(cdecimal1) + ,Sin(cdecimal1) + ,Asin(cdecimal1) + ,Cos(cdecimal1) + ,ACos(cdecimal1) + ,Atan(cdecimal1) + ,Degrees(cdecimal1) + ,Radians(cdecimal1) + ,Positive(cdecimal1) + ,Negative(cdecimal1) + ,Sign(cdecimal1) + -- Test nesting + ,cos(-sin(log(cdecimal1)) + 3.14159) +from decimal_test +-- limit output to a reasonably small number of rows +where cbigint % 500 = 0 +-- test use of a math function in the WHERE clause +and sin(cdecimal1) >= -1.0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_test +#### A masked pattern was here #### +POSTHOOK: query: select + cdecimal1 + ,Round(cdecimal1, 2) + ,Round(cdecimal1) + ,Floor(cdecimal1) + ,Ceil(cdecimal1) + ,round(Exp(cdecimal1), 58) + ,Ln(cdecimal1) + ,Log10(cdecimal1) + -- Use log2 as a representative function to test all input types. 
+ ,Log2(cdecimal1) + -- Use 15601.0 to test zero handling, as there are no zeroes in the table + ,Log2(cdecimal1 - 15601.0) + ,Log(2.0, cdecimal1) + ,Pow(log2(cdecimal1), 2.0) + ,Power(log2(cdecimal1), 2.0) + ,Sqrt(cdecimal1) + ,Abs(cdecimal1) + ,Sin(cdecimal1) + ,Asin(cdecimal1) + ,Cos(cdecimal1) + ,ACos(cdecimal1) + ,Atan(cdecimal1) + ,Degrees(cdecimal1) + ,Radians(cdecimal1) + ,Positive(cdecimal1) + ,Negative(cdecimal1) + ,Sign(cdecimal1) + -- Test nesting + ,cos(-sin(log(cdecimal1)) + 3.14159) +from decimal_test +-- limit output to a reasonably small number of rows +where cbigint % 500 = 0 +-- test use of a math function in the WHERE clause +and sin(cdecimal1) >= -1.0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_test +#### A masked pattern was here #### +-119.4594594595 -119.46 -119 -120 -119 1.316485E-52 NULL NULL NULL NULL NULL NULL NULL NULL 119.4594594595 -0.07885666683797002 NaN 0.9968859644388647 NaN -1.5624254815943668 -6844.522849943508 -2.0849608902209606 -119.4594594595 119.4594594595 -1 NULL +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 
-75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL diff --git ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out new file mode 100644 index 0000000..d155623 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out @@ -0,0 +1,675 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10)) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_PRECISION_txt +POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10)) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_PRECISION_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_precision_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_precision_txt +PREHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10)) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_PRECISION +POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10)) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_PRECISION +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt +PREHOOK: Output: default@decimal_precision +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt +POSTHOOK: Output: default@decimal_precision +POSTHOOK: Lineage: decimal_precision.dec SIMPLE [(decimal_precision_txt)decimal_precision_txt.FieldSchema(name:dec, type:decimal(20,10), comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0 +0.1234567890 +0.1234567890 +1.2345678901 +1.2345678901 
+1.2345678901 +12.3456789012 +12.3456789012 +12.3456789012 +123.4567890123 +123.4567890123 +123.4567890123 +1234.5678901235 +1234.5678901235 +1234.5678901235 +12345.6789012346 +12345.6789012346 +123456.7890123456 +123456.7890123457 +1234567.890123456 +1234567.8901234568 +12345678.90123456 +12345678.9012345679 +123456789.0123456 +123456789.0123456789 +1234567890.123456 +1234567890.1234567890 +PREHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0 1 -1 +0.1234567890 1.1234567890 -0.8765432110 +0.1234567890 1.1234567890 -0.8765432110 +1.2345678901 2.2345678901 0.2345678901 +1.2345678901 2.2345678901 0.2345678901 +1.2345678901 2.2345678901 0.2345678901 +12.3456789012 13.3456789012 11.3456789012 +12.3456789012 13.3456789012 11.3456789012 +12.3456789012 13.3456789012 11.3456789012 +123.4567890123 124.4567890123 122.4567890123 +123.4567890123 124.4567890123 122.4567890123 +123.4567890123 124.4567890123 122.4567890123 +1234.5678901235 1235.5678901235 1233.5678901235 +1234.5678901235 1235.5678901235 1233.5678901235 +1234.5678901235 1235.5678901235 1233.5678901235 +12345.6789012346 12346.6789012346 12344.6789012346 +12345.6789012346 12346.6789012346 12344.6789012346 +123456.7890123456 123457.7890123456 123455.7890123456 +123456.7890123457 123457.7890123457 123455.7890123457 +1234567.890123456 1234568.890123456 1234566.890123456 +1234567.8901234568 1234568.8901234568 1234566.8901234568 +12345678.90123456 12345679.90123456 12345677.90123456 +12345678.9012345679 12345679.9012345679 12345677.9012345679 +123456789.0123456 123456790.0123456 123456788.0123456 +123456789.0123456789 123456790.0123456789 123456788.0123456789 +1234567890.123456 1234567891.123456 1234567889.123456 +1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 +PREHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL 
+NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0 0 0 +0.1234567890 0.2469135780 0.041152263 +0.1234567890 0.2469135780 0.041152263 +1.2345678901 2.4691357802 0.411522630033 +1.2345678901 2.4691357802 0.411522630033 +1.2345678901 2.4691357802 0.411522630033 +12.3456789012 24.6913578024 4.1152263004 +12.3456789012 24.6913578024 4.1152263004 +12.3456789012 24.6913578024 4.1152263004 +123.4567890123 246.9135780246 41.1522630041 +123.4567890123 246.9135780246 41.1522630041 +123.4567890123 246.9135780246 41.1522630041 +1234.5678901235 2469.1357802470 411.522630041167 +1234.5678901235 2469.1357802470 411.522630041167 +1234.5678901235 2469.1357802470 411.522630041167 +12345.6789012346 24691.3578024692 4115.226300411533 +12345.6789012346 24691.3578024692 4115.226300411533 +123456.7890123456 246913.5780246912 41152.2630041152 +123456.7890123457 246913.5780246914 41152.263004115233 +1234567.890123456 2469135.780246912 411522.630041152 +1234567.8901234568 2469135.7802469136 411522.630041152267 +12345678.90123456 24691357.80246912 4115226.30041152 +12345678.9012345679 24691357.8024691358 4115226.300411522633 +123456789.0123456 246913578.0246912 41152263.0041152 +123456789.0123456789 246913578.0246913578 41152263.0041152263 +1234567890.123456 2469135780.246912 411522630.041152 +1234567890.1234567890 2469135780.2469135780 411522630.041152263 +PREHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0 0 +0.1234567890 0.013717421 +0.1234567890 0.013717421 +1.2345678901 0.137174210011 +1.2345678901 0.137174210011 +1.2345678901 0.137174210011 +12.3456789012 1.371742100133 +12.3456789012 1.371742100133 +12.3456789012 1.371742100133 +123.4567890123 13.717421001367 +123.4567890123 13.717421001367 +123.4567890123 13.717421001367 +1234.5678901235 137.174210013722 +1234.5678901235 137.174210013722 +1234.5678901235 137.174210013722 +12345.6789012346 1371.742100137178 +12345.6789012346 1371.742100137178 +123456.7890123456 13717.421001371733 +123456.7890123457 13717.421001371744 +1234567.890123456 137174.210013717333 +1234567.8901234568 137174.210013717422 +12345678.90123456 1371742.100137173333 +12345678.9012345679 1371742.100137174211 +123456789.0123456 
13717421.001371733333 +123456789.0123456789 13717421.0013717421 +1234567890.123456 137174210.013717333333 +1234567890.1234567890 137174210.013717421 +PREHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0 0 +0.1234567890 0.0045724736667 +0.1234567890 0.0045724736667 +1.2345678901 0.0457247366704 +1.2345678901 0.0457247366704 +1.2345678901 0.0457247366704 +12.3456789012 0.4572473667111 +12.3456789012 0.4572473667111 +12.3456789012 0.4572473667111 +123.4567890123 4.5724736671222 +123.4567890123 4.5724736671222 +123.4567890123 4.5724736671222 +1234.5678901235 45.7247366712407 +1234.5678901235 45.7247366712407 +1234.5678901235 45.7247366712407 +12345.6789012346 457.2473667123926 +12345.6789012346 457.2473667123926 +123456.7890123456 4572.4736671239111 +123456.7890123457 4572.4736671239148 +1234567.890123456 45724.7366712391111 +1234567.8901234568 45724.7366712391407 +12345678.90123456 457247.3667123911111 +12345678.9012345679 457247.3667123914037 +123456789.0123456 4572473.6671239111111 +123456789.0123456789 4572473.6671239140333 +1234567890.123456 45724736.6712391111111 +1234567890.1234567890 45724736.6712391403333 +PREHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0 0 +0.1234567890 0.01524157875019052100 +0.1234567890 0.01524157875019052100 +1.2345678901 1.52415787526596567801 +1.2345678901 1.52415787526596567801 +1.2345678901 1.52415787526596567801 +12.3456789012 152.41578753153483936144 +12.3456789012 152.41578753153483936144 +12.3456789012 152.41578753153483936144 +123.4567890123 15241.57875322755800955129 +123.4567890123 15241.57875322755800955129 +123.4567890123 15241.57875322755800955129 +1234.5678901235 1524157.87532399036884525225 +1234.5678901235 1524157.87532399036884525225 +1234.5678901235 1524157.87532399036884525225 
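Note on the dec * dec block just below: the two largest keys come back as NULL because squaring 1234567890.123456789 needs about 40 significant digits, and once the result type is capped at Hive's 38-digit maximum the value no longer fits, so it is returned as NULL rather than silently truncated. A minimal BigDecimal sketch of that enforce-or-NULL behavior (the helper name and constants here are illustrative, not Hive's actual code):

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class EnforceOrNull {
        // Round away excess fractional digits, then fail if the integer part
        // still cannot fit inside the target precision/scale.
        static BigDecimal enforceOrNull(BigDecimal bd, int maxPrecision, int maxScale) {
            if (bd.scale() > maxScale) {
                bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
            }
            if (bd.precision() - bd.scale() > maxPrecision - maxScale) {
                return null; // too many integer digits -> NULL, never a wrong value
            }
            return bd;
        }

        public static void main(String[] args) {
            BigDecimal small = new BigDecimal("12345678.9012345679");
            BigDecimal large = new BigDecimal("1234567890.1234567890");
            // 35 total digits: fits decimal(38,20), matches the row below
            System.out.println(enforceOrNull(small.multiply(small), 38, 20));
            // 39 total digits: does not fit, prints null
            System.out.println(enforceOrNull(large.multiply(large), 38, 20));
        }
    }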
+12345.6789012346 152415787.53238916034140423716 +12345.6789012346 152415787.53238916034140423716 +123456.7890123456 15241578753.23881726870921383936 +123456.7890123457 15241578753.23884196006701630849 +1234567.890123456 1524157875323.881726870921383936 +1234567.8901234568 1524157875323.88370217954558146624 +12345678.90123456 152415787532388.1726870921383936 +12345678.9012345679 152415787532388.36774881877789971041 +123456789.0123456 15241578753238817.26870921383936 +123456789.0123456789 15241578753238836.75019051998750190521 +1234567890.123456 NULL +1234567890.1234567890 NULL +PREHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_precision + Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dec (type: decimal(20,10)) + outputColumnNames: dec + Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: avg(dec), sum(dec) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: avg(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(24,14)), _col1 (type: decimal(30,10)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +88499534.57586576220645 2743485571.8518386284 +PREHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +PREHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1 +PREHOOK: 
type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +PREHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +PREHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +12345678901234567890.12345678 +PREHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +75 +PREHOOK: query: DROP TABLE DECIMAL_PRECISION_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_precision_txt +PREHOOK: Output: default@decimal_precision_txt +POSTHOOK: query: DROP TABLE DECIMAL_PRECISION_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_precision_txt +POSTHOOK: Output: default@decimal_precision_txt +PREHOOK: query: DROP TABLE DECIMAL_PRECISION +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_precision +PREHOOK: Output: default@decimal_precision +POSTHOOK: query: DROP TABLE DECIMAL_PRECISION +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_precision +POSTHOOK: Output: default@decimal_precision diff --git ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out new file mode 100644 index 0000000..cb0b5a2 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out @@ -0,0 +1,121 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_TRAILING_txt ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_TRAILING_txt +POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING_txt ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_TRAILING_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO 
TABLE DECIMAL_TRAILING_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_trailing_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_trailing_txt +PREHOOK: query: CREATE TABLE DECIMAL_TRAILING ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_TRAILING +POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_TRAILING +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_trailing_txt +PREHOOK: Output: default@decimal_trailing +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_trailing_txt +POSTHOOK: Output: default@decimal_trailing +POSTHOOK: Lineage: decimal_trailing.a SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:a, type:decimal(10,4), comment:null), ] +POSTHOOK: Lineage: decimal_trailing.b SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:b, type:decimal(15,8), comment:null), ] +POSTHOOK: Lineage: decimal_trailing.id SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:id, type:int, comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_TRAILING ORDER BY id +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_trailing +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_TRAILING ORDER BY id +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_trailing +#### A masked pattern was here #### +0 0 0 +1 0 0 +2 NULL NULL +3 1.0000 1.00000000 +4 10.0000 10.00000000 +5 100.0000 100.00000000 +6 1000.0000 1000.00000000 +7 10000.0000 10000.00000000 +8 100000.0000 100000.00000000 +9 NULL 1000000.00000000 +10 NULL NULL +11 NULL NULL +12 NULL NULL +13 NULL NULL +14 NULL NULL +15 NULL NULL +16 NULL NULL +17 NULL NULL +18 1.0000 1.00000000 +19 10.000 10.0000000 +20 100.00 100.000000 +21 1000.0 1000.00000 +22 100000 10000.0000 +23 0.0000 0.00000000 +24 0.000 0.0000000 +25 0.00 0.000000 +26 0.0 0.00000 +27 0 0.00000 +28 12313.2000 134134.31252500 +29 99999.9990 134134.31242553 +PREHOOK: query: DROP TABLE DECIMAL_TRAILING_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_trailing_txt +PREHOOK: Output: default@decimal_trailing_txt +POSTHOOK: query: DROP TABLE DECIMAL_TRAILING_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_trailing_txt +POSTHOOK: Output: default@decimal_trailing_txt +PREHOOK: query: DROP TABLE DECIMAL_TRAILING +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_trailing +PREHOOK: Output: default@decimal_trailing +POSTHOOK: query: DROP TABLE DECIMAL_TRAILING +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_trailing +POSTHOOK: Output: default@decimal_trailing diff --git ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out new file mode 100644 index 0000000..8e847f6 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out @@ -0,0 +1,2769 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +PREHOOK: type: 
DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF_txt +POSTHOOK: query: CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_udf_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_udf_txt +PREHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF +POSTHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF SELECT * FROM DECIMAL_UDF_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf_txt +PREHOOK: Output: default@decimal_udf +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF SELECT * FROM DECIMAL_UDF_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf_txt +POSTHOOK: Output: default@decimal_udf +POSTHOOK: Lineage: decimal_udf.key SIMPLE [(decimal_udf_txt)decimal_udf_txt.FieldSchema(name:key, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: decimal_udf.value SIMPLE [(decimal_udf_txt)decimal_udf_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: -- addition +EXPLAIN SELECT key + key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- addition +EXPLAIN SELECT key + key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + key) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf 
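The plan above types key + key as decimal(21,10) and runs the map side vectorized: the decimal(20,10) input keeps its scale and gains one integer digit for the carry. The result rows that follow stay exact all the way up to the largest keys; plain BigDecimal behaves the same way (a one-line illustration, not Hive code):

    import java.math.BigDecimal;

    public class ExactAdd {
        public static void main(String[] args) {
            BigDecimal key = new BigDecimal("-1234567890.1234567890"); // decimal(20,10)
            // scale 10 is preserved; the sum needs at most one extra integer digit
            System.out.println(key.add(key)); // -2469135780.2469135780
        }
    }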
+#### A masked pattern was here #### +-8800 +NULL +0.0000000000 +0 +200 +20 +2 +0.2 +0.02 +400 +40 +4 +0 +0.4 +0.04 +0.6 +0.66 +0.666 +-0.6 +-0.66 +-0.666 +2.0 +4 +6.28 +-2.24 +-2.24 +-2.244 +2.24 +2.244 +248.00 +250.4 +-2510.98 +6.28 +6.28 +6.280 +2.0000000000 +-2469135780.2469135780 +2469135780.2469135600 +PREHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + value) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +0 +NULL +0.0000000000 +0 +200 +20 +2 +0.1 +0.01 +400 +40 +4 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +2.0 +4 +6.14 +-2.12 +-2.12 +-12.122 +2.12 +2.122 +248.00 +250.2 +-2510.49 +6.14 +6.14 +7.140 +2.0000000000 +-2469135780.1234567890 +2469135780.1234567800 +PREHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2200.0 +NULL +0.0 +0.0 +150.0 +15.0 +1.5 +0.1 +0.01 +300.0 +30.0 +3.0 +0.0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +1.5 +3.0 
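Values like 4.640000000000001 just below and 0.6699999999999999 further down are expected, not decimal bugs: key + (value/2) and key + '1.0' are both planned as double (see the surrounding EXPLAIN plans), so ordinary binary floating-point error shows through, while the pure-decimal queries keep exact results. A short comparison using values taken from these rows:

    import java.math.BigDecimal;

    public class DoubleVsDecimal {
        public static void main(String[] args) {
            // key = 3.14, value = 3: the division forces double arithmetic
            System.out.println(3.14 + 3.0 / 2);  // 4.640000000000001
            // key = -0.33 plus the string literal '1.0', also double
            System.out.println(-0.33 + 1.0);     // 0.6699999999999999
            // the same sums in decimal arithmetic stay exact
            System.out.println(new BigDecimal("3.14").add(new BigDecimal("1.5")));  // 4.64
            System.out.println(new BigDecimal("-0.33").add(new BigDecimal("1.0"))); // 0.67
        }
    }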
+4.640000000000001 +-1.62 +-1.62 +-6.622 +1.62 +1.622 +186.0 +187.7 +-1882.99 +4.640000000000001 +4.640000000000001 +5.140000000000001 +1.5 +-1.8518518351234567E9 +1.8518518351234567E9 +PREHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + '1.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4399.0 +NULL +1.0 +1.0 +101.0 +11.0 +2.0 +1.1 +1.01 +201.0 +21.0 +3.0 +1.0 +1.2 +1.02 +1.3 +1.33 +1.333 +0.7 +0.6699999999999999 +0.667 +2.0 +3.0 +4.140000000000001 +-0.1200000000000001 +-0.1200000000000001 +-0.12200000000000011 +2.12 +2.122 +125.0 +126.2 +-1254.49 +4.140000000000001 +4.140000000000001 +4.140000000000001 +2.0 +-1.2345678891234567E9 +1.2345678911234567E9 +PREHOOK: query: -- subtraction +EXPLAIN SELECT key - key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- subtraction +EXPLAIN SELECT key - key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - key) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +0 +NULL +0.0000000000 +0 +0 +0 +0 +0.0 +0.00 +0 +0 +0 +0 +0.0 +0.00 +0.0 +0.00 +0.000 +0.0 +0.00 +0.000 +0.0 +0 +0.00 +0.00 +0.00 +0.000 +0.00 +0.000 +0.00 +0.0 +0.00 +0.00 +0.00 +0.000 
+0.0000000000 +0.0000000000 +0.0000000000 +PREHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - value) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-8800 +NULL +0.0000000000 +0 +0 +0 +0 +0.1 +0.01 +0 +0 +0 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +0.0 +0 +0.14 +-0.12 +-0.12 +9.878 +0.12 +0.122 +0.00 +0.2 +-0.49 +0.14 +0.14 +-0.860 +0.0000000000 +-0.1234567890 +0.1234567800 +PREHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-6600.0 +NULL +0.0 +0.0 +50.0 +5.0 +0.5 +0.1 +0.01 +100.0 +10.0 +1.0 +0.0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +0.5 +1.0 +1.6400000000000001 +-0.6200000000000001 +-0.6200000000000001 +4.378 +0.6200000000000001 +0.6220000000000001 +62.0 +62.7 +-627.99 +1.6400000000000001 +1.6400000000000001 +1.1400000000000001 +0.5 +-6.172839451234567E8 +6.172839451234567E8 +PREHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF +PREHOOK: 
type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - '1.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4401.0 +NULL +-1.0 +-1.0 +99.0 +9.0 +0.0 +-0.9 +-0.99 +199.0 +19.0 +1.0 +-1.0 +-0.8 +-0.98 +-0.7 +-0.6699999999999999 +-0.667 +-1.3 +-1.33 +-1.333 +0.0 +1.0 +2.14 +-2.12 +-2.12 +-2.122 +0.1200000000000001 +0.12200000000000011 +123.0 +124.2 +-1256.49 +2.14 +2.14 +2.14 +0.0 +-1.2345678911234567E9 +1.2345678891234567E9 +PREHOOK: query: -- multiplication +EXPLAIN SELECT key * key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- multiplication +EXPLAIN SELECT key * key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * key) (type: decimal(38,20)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +19360000 +NULL +0.00000000000000000000 +0 +10000 +100 +1 +0.01 +0.0001 +40000 +400 +4 +0 +0.04 +0.0004 +0.09 +0.1089 +0.110889 +0.09 +0.1089 +0.110889 +1.00 +4 +9.8596 +1.2544 +1.2544 +1.258884 +1.2544 +1.258884 +15376.0000 +15675.04 +1576255.1401 +9.8596 +9.8596 +9.859600 +1.00000000000000000000 +NULL +NULL +PREHOOK: query: EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0 
+POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key * value) > 0) (type: boolean) + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key, value FROM DECIMAL_UDF where key * value > 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key, value FROM DECIMAL_UDF where key * value > 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +100 100 +10 10 +1 1 +200 200 +20 20 +2 2 +1.0 1 +2 2 +3.14 3 +-1.12 -1 +-1.12 -1 +-1.122 -11 +1.12 1 +1.122 1 +124.00 124 +125.2 125 +-1255.49 -1255 +3.14 3 +3.14 3 +3.140 4 +1.0000000000 1 +-1234567890.1234567890 -1234567890 +1234567890.1234567800 1234567890 +PREHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * value) (type: decimal(31,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-19360000 +NULL +0.0000000000 +0 +10000 +100 +1 +0.0 +0.00 +40000 +400 +4 +0 +0.0 +0.00 +0.0 +0.00 +0.000 +0.0 +0.00 +0.000 +1.0 +4 +9.42 +1.12 +1.12 +12.342 +1.12 +1.122 +15376.00 +15650.0 +1575639.95 +9.42 +9.42 +12.560 +1.0000000000 +1524157875171467887.5019052100 +1524157875171467876.3907942000 +PREHOOK: query: EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN 
SELECT key * (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-9680000.0 +NULL +0.0 +0.0 +5000.0 +50.0 +0.5 +0.0 +0.0 +20000.0 +200.0 +2.0 +0.0 +0.0 +0.0 +0.0 +0.0 +0.0 +-0.0 +-0.0 +-0.0 +0.5 +2.0 +4.71 +0.56 +0.56 +6.171 +0.56 +0.561 +7688.0 +7825.0 +787819.975 +4.71 +4.71 +6.28 +0.5 +7.6207893758573389E17 +7.6207893758573389E17 +PREHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * '2.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-8800.0 +NULL +0.0 +0.0 +200.0 +20.0 +2.0 +0.2 +0.02 +400.0 +40.0 +4.0 +0.0 +0.4 +0.04 +0.6 +0.66 +0.666 +-0.6 +-0.66 +-0.666 +2.0 +4.0 +6.28 +-2.24 +-2.24 +-2.244 +2.24 +2.244 +248.0 +250.4 +-2510.98 +6.28 +6.28 +6.28 +2.0 +-2.4691357802469134E9 +2.4691357802469134E9 +PREHOOK: query: -- division +EXPLAIN SELECT key / 0 FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +POSTHOOK: query: -- division +EXPLAIN SELECT key / 0 FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked 
pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / 0) (type: decimal(22,12)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +NULL +PREHOOK: query: EXPLAIN SELECT key / NULL FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / NULL FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / null) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +NULL +PREHOOK: query: EXPLAIN SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / key) (type: decimal(38,24)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 
4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +PREHOOK: query: EXPLAIN SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (value <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / value) (type: decimal(31,21)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1 +1 +1 +1 +1 +1 +1 +1 +1 +1.046666666666666666667 +1.12 +1.12 +0.102 +1.12 +1.122 +1 +1.0016 +1.000390438247011952191 +1.046666666666666666667 +1.046666666666666666667 +0.785 +1 +1.0000000001 +1.000000000099999992710 +PREHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (value <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 
Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0933333333333333 +2.24 +2.24 +0.20400000000000001 +2.24 +2.244 +2.0 +2.0032 +2.000780876494024 +2.0933333333333333 +2.0933333333333333 +1.57 +2.0 +2.0000000002 +2.0000000002 +PREHOOK: query: EXPLAIN SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (1 + (key / '2.0')) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2199.0 +NULL +1.0 +1.0 +51.0 +6.0 +1.5 +1.05 +1.005 +101.0 +11.0 +2.0 +1.0 +1.1 +1.01 +1.15 +1.165 +1.1665 +0.85 +0.835 +0.8335 +1.5 +2.0 +2.5700000000000003 +0.43999999999999995 +0.43999999999999995 +0.43899999999999995 +1.56 +1.561 +63.0 +63.6 +-626.745 +2.5700000000000003 +2.5700000000000003 +2.5700000000000003 +1.5 +-6.172839440617284E8 +6.172839460617284E8 +PREHOOK: query: -- abs +EXPLAIN SELECT abs(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- abs +EXPLAIN SELECT abs(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + 
expressions: abs(key) (type: decimal(38,18)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT abs(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT abs(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +4400 +NULL +0.0000000000 +0 +100 +10 +1 +0.1 +0.01 +200 +20 +2 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +0.3 +0.33 +0.333 +1.0 +2 +3.14 +1.12 +1.12 +1.122 +1.12 +1.122 +124.00 +125.2 +1255.49 +3.14 +3.14 +3.140 +1.0000000000 +1234567890.1234567890 +1234567890.1234567800 +PREHOOK: query: -- avg +EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +PREHOOK: type: QUERY +POSTHOOK: query: -- avg +EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: int), key (type: decimal(20,10)) + outputColumnNames: value, key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(key), count(key), avg(key) + keys: value (type: int) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(30,10)), _col2 (type: bigint), _col3 (type: struct) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1), avg(VALUE._col2) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), (_col1 / _col2) (type: decimal(38,23)), _col3 (type: decimal(24,14)), _col1 (type: decimal(30,10)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,23)), _col2 (type: decimal(24,14)), _col3 (type: decimal(30,10)) + Reducer 3 + Reduce Operator Tree: + Select 
Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,23)), VALUE._col1 (type: decimal(24,14)), VALUE._col2 (type: decimal(30,10)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1234567890 -1234567890.123456789 -1234567890.123456789 -1234567890.1234567890 +-1255 -1255.49 -1255.49 -1255.49 +-11 -1.122 -1.122 -1.122 +-1 -1.12 -1.12 -2.24 +0 0.02538461538461538461538 0.02538461538462 0.3300000000 +1 1.0484 1.0484 5.2420000000 +2 2 2 4 +3 3.14 3.14 9.42 +4 3.14 3.14 3.140 +10 10 10 10 +20 20 20 20 +100 100 100 100 +124 124 124 124.00 +125 125.2 125.2 125.2 +200 200 200 200 +4400 -4400 -4400 -4400 +1234567890 1234567890.12345678 1234567890.12345678 1234567890.1234567800 +PREHOOK: query: -- negative +EXPLAIN SELECT -key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- negative +EXPLAIN SELECT -key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (- key) (type: decimal(20,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT -key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT -key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +4400 +NULL +0.0000000000 +0 +-100 +-10 +-1 +-0.1 +-0.01 +-200 +-20 +-2 +0 +-0.2 +-0.02 +-0.3 +-0.33 +-0.333 +0.3 +0.33 +0.333 +-1.0 +-2 +-3.14 +1.12 +1.12 +1.122 +-1.12 +-1.122 +-124.00 +-125.2 +1255.49 +-3.14 +-3.14 +-3.140 +-1.0000000000 +1234567890.1234567890 +-1234567890.1234567800 +PREHOOK: query: -- positive +EXPLAIN SELECT +key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- positive +EXPLAIN SELECT +key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + 
Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: _col0 + ListSink + +PREHOOK: query: SELECT +key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT +key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400 +NULL +0.0000000000 +0 +100 +10 +1 +0.1 +0.01 +200 +20 +2 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +1.0 +2 +3.14 +-1.12 +-1.12 +-1.122 +1.12 +1.122 +124.00 +125.2 +-1255.49 +3.14 +3.14 +3.140 +1.0000000000 +-1234567890.1234567890 +1234567890.1234567800 +PREHOOK: query: -- ceiling +EXPLAIN SELECT CEIL(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- ceiling +EXPLAIN SELECT CEIL(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ceil(key) (type: decimal(11,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400 +NULL +0 +0 +100 +10 +1 +1 +1 +200 +20 +2 +0 +1 +1 +1 +1 +1 +0 +0 +0 +1 +2 +4 +-1 +-1 +-1 +2 +2 +124 +126 +-1255 +4 +4 +4 +1 +-1234567890 +1234567891 +PREHOOK: query: -- floor +EXPLAIN SELECT FLOOR(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- floor +EXPLAIN SELECT FLOOR(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: floor(key) (type: decimal(11,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF +PREHOOK: type: 
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-4400
+NULL
+0
+0
+100
+10
+1
+0
+0
+200
+20
+2
+0
+0
+0
+0
+0
+0
+-1
+-1
+-1
+1
+2
+3
+-2
+-2
+-2
+1
+1
+124
+125
+-1256
+3
+3
+3
+1
+-1234567891
+1234567890
+PREHOOK: query: -- round
+EXPLAIN SELECT ROUND(key, 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- round
+EXPLAIN SELECT ROUND(key, 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: round(key, 2) (type: decimal(13,2))
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-4400.00
+NULL
+0.00
+0.00
+100.00
+10.00
+1.00
+0.10
+0.01
+200.00
+20.00
+2.00
+0.00
+0.20
+0.02
+0.30
+0.33
+0.33
+-0.30
+-0.33
+-0.33
+1.00
+2.00
+3.14
+-1.12
+-1.12
+-1.12
+1.12
+1.12
+124.00
+125.20
+-1255.49
+3.14
+3.14
+3.14
+1.00
+-1234567890.12
+1234567890.12
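The three rounding plans above also pin down the type rules: over a decimal(20,10) column, CEIL and FLOOR produce decimal(11,0) and ROUND(key, 2) produces decimal(13,2). The result rows follow ordinary decimal rounding, which can be reproduced with plain java.math.BigDecimal; the sketch below is a standalone illustration under that assumption, not Hive's vectorized decimal kernels.

import java.math.BigDecimal;
import java.math.RoundingMode;

// Standalone illustration of the CEIL/FLOOR/ROUND rows above; this is not
// Hive's vectorized decimal code, just the equivalent BigDecimal semantics.
public class DecimalRoundingSketch {
  public static void main(String[] args) {
    BigDecimal v = new BigDecimal("-1255.49");
    // CEIL: smallest integer >= v, hence -1255 in the golden output
    System.out.println(v.setScale(0, RoundingMode.CEILING));
    // FLOOR: largest integer <= v, hence -1256
    System.out.println(v.setScale(0, RoundingMode.FLOOR));
    // ROUND(key, 2): half-up at scale 2, e.g. 0.333 -> 0.33
    System.out.println(new BigDecimal("0.333").setScale(2, RoundingMode.HALF_UP));
    // values already at or below scale 2 are padded, e.g. 125.2 -> 125.20
    System.out.println(new BigDecimal("125.2").setScale(2, RoundingMode.HALF_UP));
  }
}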
+PREHOOK: query: -- power
+EXPLAIN SELECT POWER(key, 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- power
+EXPLAIN SELECT POWER(key, 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: power(key, 2) (type: double)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+1.936E7
+NULL
+0.0
+0.0
+10000.0
+100.0
+1.0
+0.010000000000000002
+1.0E-4
+40000.0
+400.0
+4.0
+0.0
+0.04000000000000001
+4.0E-4
+0.09
+0.10890000000000001
+0.11088900000000002
+0.09
+0.10890000000000001
+0.11088900000000002
+1.0
+4.0
+9.8596
+1.2544000000000002
+1.2544000000000002
+1.2588840000000003
+1.2544000000000002
+1.2588840000000003
+15376.0
+15675.04
+1576255.1401
+9.8596
+9.8596
+9.8596
+1.0
+1.52415787532388352E18
+1.52415787532388352E18
+PREHOOK: query: -- modulo
+EXPLAIN SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- modulo
+EXPLAIN SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ((key + 1) % (key / 2)) (type: decimal(22,12))
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-2199
+NULL
+NULL
+NULL
+1
+1
+0.0
+0.00
+0.000
+1
+1
+0
+NULL
+0.0
+0.00
+0.10
+0.010
+0.0010
+0.10
+0.010
+0.0010
+0.0
+0
+1.00
+-0.12
+-0.12
+-0.122
+0.44
+0.439
+1.00
+1.0
+-626.745
+1.00
+1.00
+1.000
+0.0000000000
+-617283944.0617283945
+1.0000000000
+PREHOOK: query: -- stddev, var
+EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value
+PREHOOK: type: QUERY
+POSTHOOK: query: -- stddev, var
+EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: value (type: int), key (type: decimal(20,10))
+                    outputColumnNames: value, key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: stddev(key), variance(key)
+                      keys: value (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: struct<count:bigint,sum:double,variance:double>)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: stddev(VALUE._col0), variance(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: double), _col2 (type: double)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-1234567890 0.0 0.0
+-1255 0.0 0.0
+-11 0.0 0.0
+-1 0.0 0.0
+0 0.22561046704494161 0.050900082840236685
+1 0.05928102563215321 0.0035142400000000066
+2 0.0 0.0
+3 0.0 0.0
+4 0.0 0.0
+10 0.0 0.0
+20 0.0 0.0
+100 0.0 0.0
+124 0.0 0.0
+125 0.0 0.0
+200 0.0 0.0
+4400 0.0 0.0
+1234567890 0.0 0.0
+PREHOOK: query: -- stddev_samp, var_samp
+EXPLAIN SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value
+PREHOOK: type: QUERY
+POSTHOOK: query: -- stddev_samp, var_samp
+EXPLAIN SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: value (type: int), key (type: decimal(20,10))
+                    outputColumnNames: value, key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: stddev_samp(key), var_samp(key)
+                      keys: value (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: struct<count:bigint,sum:double,variance:double>)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: stddev_samp(VALUE._col0), var_samp(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: double), _col2 (type: double)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-1234567890 0.0 0.0
+-1255 0.0 0.0
+-11 0.0 0.0
+-1 0.0 0.0
+0 0.2348228191855647 0.055141756410256405
+1 0.06627820154470102 0.004392800000000008
+2 0.0 0.0
+3 0.0 0.0
+4 0.0 0.0
+10 0.0 0.0
+20 0.0 0.0
+100 0.0 0.0
+124 0.0 0.0
+125 0.0 0.0
+200 0.0 0.0
+4400 0.0 0.0
+1234567890 0.0 0.0
+PREHOOK: query: -- histogram
+EXPLAIN SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- histogram
+EXPLAIN SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: decimal(20,10))
+                    outputColumnNames: key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: histogram_numeric(key, 3)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                        value expressions: _col0 (type: array<struct<x:double,y:double>>)
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: histogram_numeric(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: array<struct<x:double,y:double>>)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+[{"x":-1.2345678901234567E9,"y":1.0},{"x":-144.50057142857142,"y":35.0},{"x":1.2345678901234567E9,"y":1.0}]
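All of the stddev/variance plans share one shape: a map-side Group By in mode: hash emits struct-typed partials, and the reducer's mode: mergepartial combines them before the final double is produced. A sketch of the usual count/mean/M2 partial merge behind that pattern (illustrative class and field names, not Hive's GenericUDAF structs):

// Illustrative two-phase variance: map-side partials are merged on the
// reducer, mirroring the hash -> mergepartial shape of the plans above.
// The Welford/Chan formulation here is an assumption for illustration.
public class VariancePartial {
  private long count;
  private double mean;
  private double m2; // sum of squared deviations from the current mean

  // Welford update: the map-side "mode: hash" accumulation
  void add(double x) {
    count++;
    double delta = x - mean;
    mean += delta / count;
    m2 += delta * (x - mean);
  }

  // Parallel combine step: the reducer-side "mode: mergepartial"
  void merge(VariancePartial other) {
    if (other.count == 0) {
      return;
    }
    long n = count + other.count;
    double delta = other.mean - mean;
    m2 += other.m2 + delta * delta * ((double) count * other.count) / n;
    mean = (count * mean + other.count * other.mean) / n;
    count = n;
  }

  double variancePop()  { return count == 0 ? 0.0 : m2 / count; }      // variance()
  double varianceSamp() { return count < 2 ? 0.0 : m2 / (count - 1); } // var_samp()

  public static void main(String[] args) {
    VariancePartial a = new VariancePartial();
    VariancePartial b = new VariancePartial();
    a.add(0.3); a.add(0.33);   // one mapper's slice of a group
    b.add(0.333); b.add(0.0);  // another mapper's slice of the same group
    a.merge(b);
    System.out.println(Math.sqrt(a.variancePop()) + " " + a.varianceSamp());
  }
}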
+PREHOOK: query: -- min
+EXPLAIN SELECT MIN(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- min
+EXPLAIN SELECT MIN(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: decimal(20,10))
+                    outputColumnNames: key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: min(key)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: decimal(20,10))
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: decimal(20,10))
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+-1234567890.1234567890
+PREHOOK: query: -- max
+EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- max
+EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: decimal(20,10))
+                    outputColumnNames: key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: max(key)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: decimal(20,10))
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: decimal(20,10))
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MAX(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MAX(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+1234567890.1234567800
+PREHOOK: query: -- count
+EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+POSTHOOK: query: -- count
+EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: decimal(20,10))
+                    outputColumnNames: key
+                    Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(key)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: bigint)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT COUNT(key) FROM DECIMAL_UDF
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT COUNT(key) FROM DECIMAL_UDF
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf
+#### A masked pattern was here ####
+37
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_udf_txt
+PREHOOK: Output: default@decimal_udf_txt
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_udf_txt
+POSTHOOK: Output: default@decimal_udf_txt
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_udf
+PREHOOK: Output: default@decimal_udf
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_udf
+POSTHOOK: Output: default@decimal_udf
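MIN and MAX keep the column's decimal(20,10) type while COUNT yields bigint, and all three ignore NULLs, which is why COUNT(key) returns 37 over the table's 38 rows. A null-skipping sketch of the same semantics over java.math.BigDecimal (illustrative only, not Hive's vectorized aggregates):

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;

// Null-skipping MIN/MAX/COUNT over decimals, mirroring the golden results
// above in spirit; this is not Hive's vectorized aggregate code.
public class DecimalAggSketch {
  public static void main(String[] args) {
    List<BigDecimal> keys = Arrays.asList(
        new BigDecimal("-4400"), null, new BigDecimal("3.14"));
    BigDecimal min = null;
    BigDecimal max = null;
    long count = 0;
    for (BigDecimal k : keys) {
      if (k == null) {
        continue;  // NULLs never contribute to MIN/MAX/COUNT(col)
      }
      count++;
      min = (min == null || k.compareTo(min) < 0) ? k : min;
      max = (max == null || k.compareTo(max) > 0) ? k : max;
    }
    System.out.println(min + " " + max + " " + count); // -4400 3.14 2
  }
}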
diff --git ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out
new file mode 100644
index 0000000..53ec5dc
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out
@@ -0,0 +1,187 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(20,10), value int)
+ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_UDF2_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(20,10), value int)
+ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_UDF2_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_udf2_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_udf2_txt
+PREHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_UDF2
+POSTHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_UDF2
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2 SELECT * FROM DECIMAL_UDF2_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf2_txt
+PREHOOK: Output: default@decimal_udf2
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2 SELECT * FROM DECIMAL_UDF2_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf2_txt
+POSTHOOK: Output: default@decimal_udf2
+POSTHOOK: Lineage: decimal_udf2.key SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:key, type:decimal(20,10), comment:null), ]
+POSTHOOK: Lineage: decimal_udf2.value SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: EXPLAIN
+SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf2
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = 10) (type: boolean)
+                    Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: acos(key) (type: double), asin(key) (type: double), atan(key) (type: double), cos(key) (type: double), sin(key) (type: double), tan(key) (type: double), radians(key) (type: double)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                      Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf2
+#### A masked pattern was here ####
+NaN NaN 1.4711276743037347 -0.8390715290764524 -0.5440211108893698 0.6483608274590866 0.17453292519943295
+PREHOOK: query: EXPLAIN
+SELECT
+ exp(key), ln(key),
+ log(key), log(key, key), log(key, value), log(value, key),
+ log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT
+ exp(key), ln(key),
+ log(key), log(key, key), log(key, value), log(value, key),
+ log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_udf2
+                  Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = 10) (type: boolean)
+                    Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: exp(key) (type: double), ln(key) (type: double), log(key) (type: double), log(key, key) (type: double), log(key, value) (type: double), log(value, key) (type: double), log10(key) (type: double), sqrt(key) (type: double)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+                      Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT
+ exp(key), ln(key),
+ log(key), log(key, key), log(key, value), log(value, key),
+ log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_udf2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+ exp(key), ln(key),
+ log(key), log(key, key), log(key, value), log(value, key),
+ log10(key), sqrt(key)
+FROM DECIMAL_UDF2 WHERE key = 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_udf2
+#### A masked pattern was here ####
+22026.465794806718 2.302585092994046 2.302585092994046 1.0 1.0 1.0 1.0 3.1622776601683795
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_udf2_txt
+PREHOOK: Output: default@decimal_udf2_txt
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_udf2_txt
+POSTHOOK: Output: default@decimal_udf2_txt
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_udf2
+PREHOOK: Output: default@decimal_udf2
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_udf2
+POSTHOOK: Output: default@decimal_udf2
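Every function exercised in vector_decimal_udf2.q.out returns double, so the decimal(20,10) key is widened to double before the math runs; acos(10) and asin(10) are NaN because 10 lies outside [-1, 1], and log(key, key) is exactly 1 for any valid base. The remaining values line up with java.lang.Math:

// The double-domain results above can be reproduced with java.lang.Math.
public class DecimalMathSketch {
  public static void main(String[] args) {
    double key = 10.0;                                 // decimal(20,10) widened to double
    System.out.println(Math.acos(key));                // NaN: outside acos's [-1, 1] domain
    System.out.println(Math.atan(key));                // 1.4711276743037347
    System.out.println(Math.log(key) / Math.log(key)); // log(key, key) = 1.0
    System.out.println(Math.log10(key));               // 1.0
    System.out.println(Math.sqrt(key));                // 3.1622776601683795
    System.out.println(Math.toRadians(key));           // 0.17453292519943295
  }
}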
diff --git ql/src/test/results/clientpositive/tez/vectorization_limit.q.out ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
new file mode 100644
index 0000000..cd778e2
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
@@ -0,0 +1,565 @@
+WARNING: Comparing a bigint and a double may result in a loss of precision.
+PREHOOK: query: explain SELECT cbigint, cdouble FROM alltypesorc WHERE cbigint < cdouble and cint > 0 limit 7
+PREHOOK: type: QUERY
+POSTHOOK: query: explain SELECT cbigint, cdouble FROM alltypesorc WHERE cbigint < cdouble and cint > 0 limit 7
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 7
+      Processor Tree:
+        TableScan
+          alias: alltypesorc
+          Filter Operator
+            predicate: ((cbigint < cdouble) and (cint > 0)) (type: boolean)
+            Select Operator
+              expressions: cbigint (type: bigint), cdouble (type: double)
+              outputColumnNames: _col0, _col1
+              Limit
+                Number of rows: 7
+                ListSink
+
+WARNING: Comparing a bigint and a double may result in a loss of precision.
+PREHOOK: query: SELECT cbigint, cdouble FROM alltypesorc WHERE cbigint < cdouble and cint > 0 limit 7
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cbigint, cdouble FROM alltypesorc WHERE cbigint < cdouble and cint > 0 limit 7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-1887561756 1839.0
+-1887561756 -10011.0
+-1887561756 -13877.0
+-1887561756 10361.0
+-1887561756 -8881.0
+-1887561756 -2281.0
+-1887561756 9531.0
+PREHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage - c/p parts from limit_pushdown
+
+explain
+select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
+PREHOOK: type: QUERY
+POSTHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage - c/p parts from limit_pushdown
+
+explain
+select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ctinyint is not null (type: boolean)
+                    Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint), cdouble (type: double), csmallint (type: smallint)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: tinyint), _col1 (type: double)
+                        sort order: ++
+                        Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.3
+                        value expressions: _col2 (type: smallint)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: double), VALUE._col0 (type: smallint)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 20
+                  Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-64 -15920.0 -15920
+-64 -10462.0 -10462
+-64 -9842.0 -9842
+-64 -8080.0 -8080
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -7196.0 -7196
+-64 -6907.0 -6907
+-64 -4803.0 -4803
+-64 -4040.0 -4040
+-64 -4018.0 -4018
+-64 -3586.0 -3586
+-64 -3097.0 -3097
+-64 -2919.0 -2919
+-64 -1600.0 -1600
+-64 -200.0 -200
+PREHOOK: query: -- deduped RS
+explain
+select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
+PREHOOK: type: QUERY
+POSTHOOK: query: -- deduped RS
+explain
+select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ctinyint (type: tinyint), cdouble (type: double)
+                    outputColumnNames: ctinyint, cdouble
+                    Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: avg((cdouble + 1))
+                      keys: ctinyint (type: tinyint)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: tinyint)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: tinyint)
+                        Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.3
+                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:double>)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: tinyint), _col1 (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                  Limit
+                    Number of rows: 20
+                    Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL 9370.0945309795
+-64 373.52941176470586
+-63 2178.7272727272725
+-62 245.69387755102042
+-61 914.3404255319149
+-60 1071.82
+-59 318.27272727272725
+-58 3483.2444444444445
+-57 1867.0535714285713
+-56 2595.818181818182
+-55 2385.595744680851
+-54 2712.7272727272725
+-53 -532.7567567567568
+-52 2810.705882352941
+-51 -96.46341463414635
+-50 -960.0192307692307
+-49 768.7659574468086
+-48 1672.909090909091
+-47 -574.6428571428571
+-46 3033.55
+PREHOOK: query: -- distincts
+explain
+select distinct(ctinyint) from alltypesorc limit 20
+PREHOOK: type: QUERY
+POSTHOOK: query: -- distincts
+explain
+select distinct(ctinyint) from alltypesorc limit 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ctinyint (type: tinyint)
+                    outputColumnNames: ctinyint
+                    Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      keys: ctinyint (type: tinyint)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: tinyint)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: tinyint)
+                        Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.3
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: tinyint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                  Limit
+                    Number of rows: 20
+                    Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select distinct(ctinyint) from alltypesorc limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select distinct(ctinyint) from alltypesorc limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL
+-64
+-63
+-62
+-61
+-60
+-59
+-58
+-57
+-56
+-55
+-54
+-53
+-52
+-51
+-50
+-49
+-48
+-47
+-46
+PREHOOK: query: explain
+select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint limit 20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint limit 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ctinyint (type: tinyint), cdouble (type: double)
+                    outputColumnNames: ctinyint, cdouble
+                    Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(DISTINCT cdouble)
+                      keys: ctinyint (type: tinyint), cdouble (type: double)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: tinyint), _col1 (type: double)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: tinyint)
+                        Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.3
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(DISTINCT KEY._col1:0._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: tinyint), _col1 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                  Limit
+                    Number of rows: 20
+                    Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL 2932
+-64 24
+-63 19
+-62 27
+-61 25
+-60 27
+-59 31
+-58 23
+-57 35
+-56 36
+-55 29
+-54 26
+-53 22
+-52 33
+-51 21
+-50 30
+-49 26
+-48 29
+-47 22
+-46 24
+PREHOOK: query: -- limit zero
+explain
+select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
+PREHOOK: type: QUERY
+POSTHOOK: query: -- limit zero
+explain
+select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 0
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PREHOOK: query: -- 2MR (applied to last RS)
+explain
+select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
+PREHOOK: type: QUERY
+POSTHOOK: query: -- 2MR (applied to last RS)
+explain
+select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ctinyint is not null (type: boolean)
+                    Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: cdouble (type: double), ctinyint (type: tinyint)
+                      outputColumnNames: cdouble, ctinyint
+                      Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        aggregations: sum(ctinyint)
+                        keys: cdouble (type: double)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: double)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: double)
+                          Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: vectorized
+        Reducer 2
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: double)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: double), _col1 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col1 (type: bigint), _col0 (type: double)
+                    sort order: ++
+                    Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
+                    TopN Hash Memory Usage: 0.3
+            Execution mode: vectorized
+        Reducer 3
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey1 (type: double), KEY.reducesinkkey0 (type: bigint)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 20
+                  Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 20 Data size: 600 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL -32768
+-7196.0 -2009
+15601.0 -1733
+4811.0 -115
+-11322.0 -101
+-1121.0 -89
+7705.0 -88
+3520.0 -86
+-8118.0 -80
+5241.0 -80
+-11492.0 -78
+9452.0 -76
+557.0 -75
+10496.0 -67
+-15920.0 -64
+-10462.0 -64
+-9842.0 -64
+-8080.0 -64
+-6907.0 -64
+-4803.0 -64
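The "TopN Hash Memory Usage: 0.3" line on the map-side Reduce Output Operators marks the HIVE-3562 limit pushdown exercised above: each mapper keeps only rows that can still reach the global LIMIT and drops the rest before they are serialized to the shuffle. A bounded heap gives the same effect; the sketch below is an illustration of the idea, not Hive's TopNHash class:

import java.util.PriorityQueue;

// Bounded top-N: rows that cannot make the final LIMIT are discarded on the
// map side, before the shuffle. Illustrative only; not Hive's TopNHash.
public class TopNSketch {
  public static void main(String[] args) {
    int n = 20;
    // Max-heap on the sort key, so the worst retained row sits on top.
    PriorityQueue<Integer> heap = new PriorityQueue<>((a, b) -> Integer.compare(b, a));
    int[] ctinyint = {5, -64, 3, -63, 7, -62};  // stand-in for the key column
    for (int key : ctinyint) {
      if (heap.size() < n) {
        heap.offer(key);
      } else if (key < heap.peek()) {           // ascending ORDER BY: keep the n smallest
        heap.poll();
        heap.offer(key);
      }
      // otherwise the row is dropped here and never reaches the reducer
    }
    System.out.println(heap);
  }
}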
diff --git ql/src/test/results/clientpositive/tez/vectorized_casts.q.out ql/src/test/results/clientpositive/tez/vectorized_casts.q.out
new file mode 100644
index 0000000..4be6100
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/vectorized_casts.q.out
@@ -0,0 +1,337 @@
+PREHOOK: query: -- Test type casting in vectorized mode to verify end-to-end functionality.
+
+explain
+select
+-- to boolean
+ cast (ctinyint as boolean)
+ ,cast (csmallint as boolean)
+ ,cast (cint as boolean)
+ ,cast (cbigint as boolean)
+ ,cast (cfloat as boolean)
+ ,cast (cdouble as boolean)
+ ,cast (cboolean1 as boolean)
+ ,cast (cbigint * 0 as boolean)
+ ,cast (ctimestamp1 as boolean)
+ ,cast (cstring1 as boolean)
+-- to int family
+ ,cast (ctinyint as int)
+ ,cast (csmallint as int)
+ ,cast (cint as int)
+ ,cast (cbigint as int)
+ ,cast (cfloat as int)
+ ,cast (cdouble as int)
+ ,cast (cboolean1 as int)
+ ,cast (ctimestamp1 as int)
+ ,cast (cstring1 as int)
+ ,cast (substr(cstring1, 1, 1) as int)
+ ,cast (cfloat as tinyint)
+ ,cast (cfloat as smallint)
+ ,cast (cfloat as bigint)
+-- to float family
+ ,cast (ctinyint as double)
+ ,cast (csmallint as double)
+ ,cast (cint as double)
+ ,cast (cbigint as double)
+ ,cast (cfloat as double)
+ ,cast (cdouble as double)
+ ,cast (cboolean1 as double)
+ ,cast (ctimestamp1 as double)
+ ,cast (cstring1 as double)
+ ,cast (substr(cstring1, 1, 1) as double)
+ ,cast (cint as float)
+ ,cast (cdouble as float)
+-- to timestamp
+ ,cast (ctinyint as timestamp)
+ ,cast (csmallint as timestamp)
+ ,cast (cint as timestamp)
+ ,cast (cbigint as timestamp)
+ ,cast (cfloat as timestamp)
+ ,cast (cdouble as timestamp)
+ ,cast (cboolean1 as timestamp)
+ ,cast (cbigint * 0 as timestamp)
+ ,cast (ctimestamp1 as timestamp)
+ ,cast (cstring1 as timestamp)
+ ,cast (substr(cstring1, 1, 1) as timestamp)
+-- to string
+ ,cast (ctinyint as string)
+ ,cast (csmallint as string)
+ ,cast (cint as string)
+ ,cast (cbigint as string)
+ ,cast (cfloat as string)
+ ,cast (cdouble as string)
+ ,cast (cboolean1 as string)
+ ,cast (cbigint * 0 as string)
+ ,cast (ctimestamp1 as string)
+ ,cast (cstring1 as string)
+-- nested and expression arguments
+ ,cast (cast (cfloat as int) as float)
+ ,cast (cint * 2 as double)
+ ,cast (sin(cfloat) as string)
+ ,cast (cint as float) + cast(cboolean1 as double)
+from alltypesorc
+-- limit output to a reasonably small number of rows
+where cbigint % 250 = 0
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Test type casting in vectorized mode to verify end-to-end functionality.
+
+explain
+select
+-- to boolean
+ cast (ctinyint as boolean)
+ ,cast (csmallint as boolean)
+ ,cast (cint as boolean)
+ ,cast (cbigint as boolean)
+ ,cast (cfloat as boolean)
+ ,cast (cdouble as boolean)
+ ,cast (cboolean1 as boolean)
+ ,cast (cbigint * 0 as boolean)
+ ,cast (ctimestamp1 as boolean)
+ ,cast (cstring1 as boolean)
+-- to int family
+ ,cast (ctinyint as int)
+ ,cast (csmallint as int)
+ ,cast (cint as int)
+ ,cast (cbigint as int)
+ ,cast (cfloat as int)
+ ,cast (cdouble as int)
+ ,cast (cboolean1 as int)
+ ,cast (ctimestamp1 as int)
+ ,cast (cstring1 as int)
+ ,cast (substr(cstring1, 1, 1) as int)
+ ,cast (cfloat as tinyint)
+ ,cast (cfloat as smallint)
+ ,cast (cfloat as bigint)
+-- to float family
+ ,cast (ctinyint as double)
+ ,cast (csmallint as double)
+ ,cast (cint as double)
+ ,cast (cbigint as double)
+ ,cast (cfloat as double)
+ ,cast (cdouble as double)
+ ,cast (cboolean1 as double)
+ ,cast (ctimestamp1 as double)
+ ,cast (cstring1 as double)
+ ,cast (substr(cstring1, 1, 1) as double)
+ ,cast (cint as float)
+ ,cast (cdouble as float)
+-- to timestamp
+ ,cast (ctinyint as timestamp)
+ ,cast (csmallint as timestamp)
+ ,cast (cint as timestamp)
+ ,cast (cbigint as timestamp)
+ ,cast (cfloat as timestamp)
+ ,cast (cdouble as timestamp)
+ ,cast (cboolean1 as timestamp)
+ ,cast (cbigint * 0 as timestamp)
+ ,cast (ctimestamp1 as timestamp)
+ ,cast (cstring1 as timestamp)
+ ,cast (substr(cstring1, 1, 1) as timestamp)
+-- to string
+ ,cast (ctinyint as string)
+ ,cast (csmallint as string)
+ ,cast (cint as string)
+ ,cast (cbigint as string)
+ ,cast (cfloat as string)
+ ,cast (cdouble as string)
+ ,cast (cboolean1 as string)
+ ,cast (cbigint * 0 as string)
+ ,cast (ctimestamp1 as string)
+ ,cast (cstring1 as string)
+-- nested and expression arguments
+ ,cast (cast (cfloat as int) as float)
+ ,cast (cint * 2 as double)
+ ,cast (sin(cfloat) as string)
+ ,cast (cint as float) + cast(cboolean1 as double)
+from alltypesorc
+-- limit output to a reasonably small number of rows
+where cbigint % 250 = 0
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: alltypesorc
+          Filter Operator
+            predicate: ((cbigint % 250) = 0) (type: boolean)
+            Select Operator
+              expressions: UDFToBoolean(ctinyint) (type: boolean), UDFToBoolean(csmallint) (type: boolean), UDFToBoolean(cint) (type: boolean), UDFToBoolean(cbigint) (type: boolean), UDFToBoolean(cfloat) (type: boolean), UDFToBoolean(cdouble) (type: boolean), cboolean1 (type: boolean), UDFToBoolean((cbigint * 0)) (type: boolean), UDFToBoolean(ctimestamp1) (type: boolean), UDFToBoolean(cstring1) (type: boolean), UDFToInteger(ctinyint) (type: int), UDFToInteger(csmallint) (type: int), cint (type: int), UDFToInteger(cbigint) (type: int), UDFToInteger(cfloat) (type: int), UDFToInteger(cdouble) (type: int), UDFToInteger(cboolean1) (type: int), UDFToInteger(ctimestamp1) (type: int), UDFToInteger(cstring1) (type: int), UDFToInteger(substr(cstring1, 1, 1)) (type: int), UDFToByte(cfloat) (type: tinyint), UDFToShort(cfloat) (type: smallint), UDFToLong(cfloat) (type: bigint), UDFToDouble(ctinyint) (type: double), UDFToDouble(csmallint) (type: double), UDFToDouble(cint) (type: double), UDFToDouble(cbigint) (type: double), UDFToDouble(cfloat) (type: double), cdouble (type: double), UDFToDouble(cboolean1) (type: double), UDFToDouble(ctimestamp1) (type: double), UDFToDouble(cstring1) (type: double), UDFToDouble(substr(cstring1, 1, 1)) (type: double), UDFToFloat(cint) (type: float), UDFToFloat(cdouble) (type: float), CAST( ctinyint AS TIMESTAMP) (type: timestamp), CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) (type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0) AS TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: timestamp), UDFToString(ctinyint) (type: string), UDFToString(csmallint) (type: string), UDFToString(cint) (type: string), UDFToString(cbigint) (type: string), UDFToString(cfloat) (type: string), UDFToString(cdouble) (type: string), UDFToString(cboolean1) (type: string), UDFToString((cbigint * 0)) (type: string), UDFToString(ctimestamp1) (type: string), cstring1 (type: string), UDFToFloat(UDFToInteger(cfloat)) (type: float), UDFToDouble((cint * 2)) (type: double), UDFToString(sin(cfloat)) (type: string), (UDFToFloat(cint) + UDFToDouble(cboolean1)) (type: double)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55, _col56, _col57, _col58, _col59
+              ListSink
+
+PREHOOK: query: select
+-- to boolean
+ cast (ctinyint as boolean)
+ ,cast (csmallint as boolean)
+ ,cast (cint as boolean)
+ ,cast (cbigint as boolean)
+ ,cast (cfloat as boolean)
+ ,cast (cdouble as boolean)
+ ,cast (cboolean1 as boolean)
+ ,cast (cbigint * 0 as boolean)
+ ,cast (ctimestamp1 as boolean)
+ ,cast (cstring1 as boolean)
+-- to int family
+ ,cast (ctinyint as int)
+ ,cast (csmallint as int)
+ ,cast (cint as int)
+ ,cast (cbigint as int)
+ ,cast (cfloat as int)
+ ,cast (cdouble as int)
+ ,cast (cboolean1 as int)
+ ,cast (ctimestamp1 as int)
+ ,cast (cstring1 as int)
+ ,cast (substr(cstring1, 1, 1) as int)
+ ,cast (cfloat as tinyint)
+ ,cast (cfloat as smallint)
+ ,cast (cfloat as bigint)
+-- to float family
+ ,cast (ctinyint as double)
+ ,cast (csmallint as double)
+ ,cast (cint as double)
+ ,cast (cbigint as double)
+ ,cast (cfloat as double)
+ ,cast (cdouble as double)
+ ,cast (cboolean1 as double)
+ ,cast (ctimestamp1 as double)
+ ,cast (cstring1 as double)
+ ,cast (substr(cstring1, 1, 1) as double)
+ ,cast (cint as float)
+ ,cast (cdouble as float)
+-- to timestamp
+ ,cast (ctinyint as timestamp)
+ ,cast (csmallint as timestamp)
+ ,cast (cint as timestamp)
+ ,cast (cbigint as timestamp)
+ ,cast (cfloat as timestamp)
+ ,cast (cdouble as timestamp)
+ ,cast (cboolean1 as timestamp)
+ ,cast (cbigint * 0 as timestamp)
+ ,cast (ctimestamp1 as timestamp)
+ ,cast (cstring1 as timestamp)
+ ,cast (substr(cstring1, 1, 1) as timestamp)
+-- to string
+ ,cast (ctinyint as string)
+ ,cast (csmallint as string)
+ ,cast (cint as string)
+ ,cast (cbigint as string)
+ ,cast (cfloat as string)
+ ,cast (cdouble as string)
+ ,cast (cboolean1 as string)
+ ,cast (cbigint * 0 as string)
+ ,cast (ctimestamp1 as string)
+ ,cast (cstring1 as string)
+-- nested and expression arguments
+ ,cast (cast (cfloat as int) as float)
+ ,cast (cint * 2 as double)
+ ,cast (sin(cfloat) as string)
+ ,cast (cint as float) + cast(cboolean1 as double)
+from alltypesorc
+-- limit output to a reasonably small number of rows
+where cbigint % 250 = 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select
+-- to boolean
+ cast (ctinyint as boolean)
+ ,cast (csmallint as boolean)
+ ,cast (cint as boolean)
+ ,cast (cbigint as boolean)
+ ,cast (cfloat as boolean)
+ ,cast (cdouble as boolean)
+ ,cast (cboolean1 as boolean)
+ ,cast (cbigint * 0 as boolean)
+ ,cast (ctimestamp1 as boolean)
+ ,cast (cstring1 as boolean)
+-- to int family
+ ,cast (ctinyint as int)
+ ,cast (csmallint as int)
+ ,cast (cint as int)
+ ,cast (cbigint as int)
+ ,cast (cfloat as int)
+ ,cast (cdouble as int)
+ ,cast (cboolean1 as int)
+ ,cast (ctimestamp1 as int)
+ ,cast (cstring1 as int)
+ ,cast (substr(cstring1, 1, 1) as int)
+ ,cast (cfloat as tinyint)
+ ,cast (cfloat as smallint)
+ ,cast (cfloat as bigint)
+-- to float family
+ ,cast (ctinyint as double)
+ ,cast (csmallint as double)
+ ,cast (cint as double)
+ ,cast (cbigint as double)
+ ,cast (cfloat as double)
+ ,cast (cdouble as double)
+ ,cast (cboolean1 as double)
+ ,cast (ctimestamp1 as double)
+ ,cast (cstring1 as double)
+ ,cast (substr(cstring1, 1, 1) as double)
+ ,cast (cint as float)
+ ,cast (cdouble as float)
+-- to timestamp
+ ,cast (ctinyint as timestamp)
+ ,cast (csmallint as timestamp)
+ ,cast (cint as timestamp)
+ ,cast (cbigint as timestamp)
+ ,cast (cfloat as timestamp)
+ ,cast (cdouble as timestamp)
+ ,cast (cboolean1 as timestamp)
+ ,cast (cbigint * 0 as timestamp)
+ ,cast (ctimestamp1 as timestamp)
+ ,cast (cstring1 as timestamp)
+ ,cast (substr(cstring1, 1, 1) as timestamp)
+-- to string
+ ,cast (ctinyint as string)
+ ,cast (csmallint as string)
+ ,cast (cint as string)
+ ,cast (cbigint as string)
+ ,cast (cfloat as string)
+ ,cast (cdouble as string)
+ ,cast (cboolean1 as string)
+ ,cast (cbigint * 0 as string)
+ ,cast (ctimestamp1 as string)
+ ,cast (cstring1 as string)
+-- nested and expression arguments
+ ,cast (cast (cfloat as int) as float)
+ ,cast (cint * 2 as double)
+ ,cast (sin(cfloat) as string)
+ ,cast (cint as float) + cast(cboolean1 as double)
+from alltypesorc
+-- limit output to a reasonably small number of rows
+where cbigint % 250 = 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+true true NULL true true true NULL false true NULL -36 -200 NULL -2006216750 -36 -200 NULL -15 NULL NULL -36 -36 -36 -36.0 -200.0 NULL -2.00621675E9 -36.0 -200.0 NULL -14.252 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -36 -200 NULL -2006216750 -36.0 -200.0 NULL 0 1969-12-31 15:59:45.748 NULL -36.0 NULL 0.9917788534431158 NULL
NULL 0 1969-12-31 15:59:53.817 NULL -36.0 NULL 0.9917788534431158 NULL +true true NULL true true true NULL false true NULL -30 -200 NULL 1429852250 -30 -200 NULL 12 NULL NULL -30 -30 -30 -30.0 -200.0 NULL 1.42985225E9 -30.0 -200.0 NULL 12.935 NULL NULL NULL -200.0 1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL -30 -200 NULL 1429852250 -30.0 -200.0 NULL 0 1969-12-31 16:00:12.935 NULL -30.0 NULL 0.9880316240928618 NULL +true NULL true true true NULL false false true true -51 NULL 773600971 1053923250 -51 NULL 0 8 NULL 2 -51 -51 -51 -51.0 NULL 7.73600971E8 1.05392325E9 -51.0 NULL 0.0 8.451 NULL 2.0 7.7360096E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 773600971 1053923250 -51.0 NULL FALSE 0 1969-12-31 16:00:08.451 2yK4Bx76O -51.0 1.547201942E9 -0.6702291758433747 7.7360096E8 +true NULL true true true NULL true false true true -51 NULL 747553882 -1930467250 -51 NULL 1 8 NULL NULL -51 -51 -51 -51.0 NULL 7.47553882E8 -1.93046725E9 -51.0 NULL 1.0 8.451 NULL NULL 7.4755386E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 747553882 -1930467250 -51.0 NULL TRUE 0 1969-12-31 16:00:08.451 q8M86Fx0r -51.0 1.495107764E9 -0.6702291758433747 7.47553857E8 +true true NULL true true true NULL false true NULL 20 15601 NULL -362433250 20 15601 NULL -15 NULL NULL 20 20 20 20.0 15601.0 NULL -3.6243325E8 20.0 15601.0 NULL -14.871 NULL NULL NULL 15601.0 1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL 20 15601 NULL -362433250 20.0 15601.0 NULL 0 1969-12-31 15:59:45.129 NULL 20.0 NULL 0.9129452507276277 NULL +true true NULL true true true NULL false true NULL -38 15601 NULL -1858689000 -38 15601 NULL -2 NULL NULL -38 -38 -38 -38.0 15601.0 NULL -1.858689E9 -38.0 15601.0 NULL -1.3860000000000001 NULL NULL NULL 15601.0 1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -38 15601 NULL -1858689000 -38.0 15601.0 NULL 0 1969-12-31 15:59:58.614 NULL -38.0 NULL -0.2963685787093853 NULL +true true NULL true true true NULL false true NULL -5 15601 NULL 612416000 -5 15601 NULL 4 NULL NULL -5 -5 -5 -5.0 15601.0 NULL 6.12416E8 -5.0 15601.0 NULL 4.679 NULL NULL NULL 15601.0 1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -5 15601 NULL 612416000 -5.0 15601.0 NULL 0 1969-12-31 16:00:04.679 NULL -5.0 NULL 0.9589242746631385 NULL +true true NULL true true true NULL false true NULL 48 15601 NULL -795361000 48 15601 NULL -10 NULL NULL 48 48 48 48.0 15601.0 NULL -7.95361E8 48.0 15601.0 NULL -9.765 NULL NULL NULL 15601.0 1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL 48 15601 NULL -795361000 48.0 15601.0 NULL 0 1969-12-31 15:59:50.235 NULL 48.0 NULL -0.7682546613236668 NULL +true NULL true true true NULL false false true true 8 NULL -661621138 
-931392750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -6.61621138E8 -9.3139275E8 8.0 NULL 0.0 15.892 NULL NULL -6.6162112E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -661621138 -931392750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 L15l8i5k558tBcDV20 8.0 -1.323242276E9 0.9893582466233818 -6.6162112E8 +true NULL true true true NULL false false true true 8 NULL -102936434 -1312782750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -1.02936434E8 -1.31278275E9 8.0 NULL 0.0 15.892 NULL NULL -1.02936432E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -102936434 -1312782750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 eJROSNhugc3kQR7Pb 8.0 -2.05872868E8 0.9893582466233818 -1.02936432E8 +true NULL true true true NULL false false true true 8 NULL 805179664 868161500 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL 8.05179664E8 8.681615E8 8.0 NULL 0.0 15.892 NULL NULL 8.0517965E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 805179664 868161500 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 e005B5q 8.0 1.610359328E9 0.9893582466233818 8.05179648E8 +true NULL true true true NULL false false true true 8 NULL -669632311 1588591250 8 NULL 0 15 NULL 3 8 8 8 8.0 NULL -6.69632311E8 1.58859125E9 8.0 NULL 0.0 15.892 NULL 3.0 -6.6963232E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -669632311 1588591250 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 3r3sDvfUkG0yTP3LnX5mNQRr 8.0 -1.339264622E9 0.9893582466233818 -6.6963232E8 +true NULL true true true NULL true false true true 8 NULL 890988972 -1862301000 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 8.90988972E8 -1.862301E9 8.0 NULL 1.0 15.892 NULL NULL 8.9098899E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 890988972 -1862301000 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 XylAH4 8.0 1.781977944E9 0.9893582466233818 8.90988993E8 +true NULL true true true NULL true false true true 8 NULL 930867246 1205399250 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 9.30867246E8 1.20539925E9 8.0 NULL 1.0 15.892 NULL NULL 9.3086726E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 930867246 1205399250 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 c1V8o1A 8.0 1.861734492E9 0.9893582466233818 9.30867265E8 +true true NULL true true true NULL false true NULL -59 -7196 NULL -1604890000 -59 -7196 NULL 13 NULL NULL -59 -59 -59 -59.0 -7196.0 NULL -1.60489E9 -59.0 -7196.0 NULL 13.15 NULL NULL NULL -7196.0 1969-12-31 15:59:59.941 1969-12-31 15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -59 -7196 NULL -1604890000 -59.0 -7196.0 NULL 0 1969-12-31 16:00:13.15 NULL -59.0 NULL -0.6367380071391379 NULL +true true NULL true true true NULL false true NULL -21 -7196 NULL 1542429000 -21 -7196 NULL -5 NULL NULL -21 -21 -21 -21.0 -7196.0 NULL 
1.542429E9 -21.0 -7196.0 NULL -4.1 NULL NULL NULL -7196.0 1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -21 -7196 NULL 1542429000 -21.0 -7196.0 NULL 0 1969-12-31 15:59:55.9 NULL -21.0 NULL -0.8366556385360561 NULL +true true NULL true true true NULL false true NULL -60 -7196 NULL 1516314750 -60 -7196 NULL -8 NULL NULL -60 -60 -60 -60.0 -7196.0 NULL 1.51631475E9 -60.0 -7196.0 NULL -7.592 NULL NULL NULL -7196.0 1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -60 -7196 NULL 1516314750 -60.0 -7196.0 NULL 0 1969-12-31 15:59:52.408 NULL -60.0 NULL 0.3048106211022167 NULL +true true NULL true true true NULL false true NULL -14 -7196 NULL -1552199500 -14 -7196 NULL 11 NULL NULL -14 -14 -14 -14.0 -7196.0 NULL -1.5521995E9 -14.0 -7196.0 NULL 11.065 NULL NULL NULL -7196.0 1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -14 -7196 NULL -1552199500 -14.0 -7196.0 NULL 0 1969-12-31 16:00:11.065 NULL -14.0 NULL -0.9906073556948704 NULL +true true NULL true true true NULL false true NULL 59 -7196 NULL -1137754500 59 -7196 NULL 10 NULL NULL 59 59 59 59.0 -7196.0 NULL -1.1377545E9 59.0 -7196.0 NULL 10.956 NULL NULL NULL -7196.0 1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL 59 -7196 NULL -1137754500 59.0 -7196.0 NULL 0 1969-12-31 16:00:10.956 NULL 59.0 NULL 0.6367380071391379 NULL +true true NULL true true true NULL false true NULL -8 -7196 NULL -1849991500 -8 -7196 NULL 3 NULL NULL -8 -8 -8 -8.0 -7196.0 NULL -1.8499915E9 -8.0 -7196.0 NULL 3.136 NULL NULL NULL -7196.0 1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -8 -7196 NULL -1849991500 -8.0 -7196.0 NULL 0 1969-12-31 16:00:03.136 NULL -8.0 NULL -0.9893582466233818 NULL +true true NULL true true true NULL false true NULL 5 -7196 NULL -1015607500 5 -7196 NULL 10 NULL NULL 5 5 5 5.0 -7196.0 NULL -1.0156075E9 5.0 -7196.0 NULL 10.973 NULL NULL NULL -7196.0 1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL 5 -7196 NULL -1015607500 5.0 -7196.0 NULL 0 1969-12-31 16:00:10.973 NULL 5.0 NULL -0.9589242746631385 NULL +true true NULL true true true NULL false true NULL -24 -7196 NULL 829111000 -24 -7196 NULL -7 NULL NULL -24 -24 -24 -24.0 -7196.0 NULL 8.29111E8 -24.0 -7196.0 NULL -6.855 NULL NULL NULL -7196.0 1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -24 -7196 NULL 829111000 -24.0 -7196.0 NULL 0 1969-12-31 15:59:53.145 NULL -24.0 NULL 0.9055783620066238 NULL +true true NULL true true true NULL false true NULL -50 -7196 NULL -1031187250 -50 -7196 NULL -6 NULL NULL -50 -50 -50 -50.0 -7196.0 NULL -1.03118725E9 -50.0 -7196.0 NULL -5.267 NULL NULL NULL -7196.0 1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 
1969-12-31 15:59:54.733 NULL NULL -50 -7196 NULL -1031187250 -50.0 -7196.0 NULL 0 1969-12-31 15:59:54.733 NULL -50.0 NULL 0.26237485370392877 NULL
+true NULL true true true NULL true false true true 11 NULL -64615982 1803053750 11 NULL 1 2 NULL 8 11 11 11 11.0 NULL -6.4615982E7 1.80305375E9 11.0 NULL 1.0 2.351 NULL 8.0 -6.4615984E7 NULL 1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -64615982 1803053750 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 8J5OB7K26PEV7kdbeHr3 11.0 -1.29231964E8 -0.9999902065507035 -6.4615983E7
+true NULL true true true NULL true false true true 11 NULL -335450417 1233327000 11 NULL 1 2 NULL NULL 11 11 11 11.0 NULL -3.35450417E8 1.233327E9 11.0 NULL 1.0 2.351 NULL NULL -3.35450432E8 NULL 1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -335450417 1233327000 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 dOYnqgaXoJ1P3ERwxe5N7 11.0 -6.70900834E8 -0.9999902065507035 -3.35450431E8
diff --git ql/src/test/results/clientpositive/tez/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/tez/vectorized_date_funcs.q.out
new file mode 100644
index 0000000..8f08aca
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/vectorized_date_funcs.q.out
@@ -0,0 +1,1019 @@
+PREHOOK: query: -- Test timestamp functions in vectorized mode to verify they run correctly end-to-end.
+
+CREATE TABLE date_udf_flight (
+ origin_city_name STRING,
+ dest_city_name STRING,
+ fl_date DATE,
+ arr_delay FLOAT,
+ fl_num INT
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@date_udf_flight
+POSTHOOK: query: -- Test timestamp functions in vectorized mode to verify they run correctly end-to-end.
+ +CREATE TABLE date_udf_flight ( + origin_city_name STRING, + dest_city_name STRING, + fl_date DATE, + arr_delay FLOAT, + fl_num INT +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@date_udf_flight +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@date_udf_flight +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@date_udf_flight +PREHOOK: query: CREATE TABLE date_udf_flight_orc ( + fl_date DATE, + fl_time TIMESTAMP +) STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@date_udf_flight_orc +POSTHOOK: query: CREATE TABLE date_udf_flight_orc ( + fl_date DATE, + fl_time TIMESTAMP +) STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@date_udf_flight_orc +PREHOOK: query: INSERT INTO TABLE date_udf_flight_orc SELECT fl_date, to_utc_timestamp(fl_date, 'America/Los_Angeles') FROM date_udf_flight +PREHOOK: type: QUERY +PREHOOK: Input: default@date_udf_flight +PREHOOK: Output: default@date_udf_flight_orc +POSTHOOK: query: INSERT INTO TABLE date_udf_flight_orc SELECT fl_date, to_utc_timestamp(fl_date, 'America/Los_Angeles') FROM date_udf_flight +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_udf_flight +POSTHOOK: Output: default@date_udf_flight_orc +POSTHOOK: Lineage: date_udf_flight_orc.fl_date SIMPLE [(date_udf_flight)date_udf_flight.FieldSchema(name:fl_date, type:date, comment:null), ] +POSTHOOK: Lineage: date_udf_flight_orc.fl_time EXPRESSION [(date_udf_flight)date_udf_flight.FieldSchema(name:fl_date, type:date, comment:null), ] +PREHOOK: query: SELECT * FROM date_udf_flight_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM date_udf_flight_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 
07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-31 2010-10-31 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-22 2010-10-22 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-30 2010-10-30 07:00:00 +2010-10-20 2010-10-20 07:00:00 +2010-10-21 2010-10-21 07:00:00 +2010-10-23 2010-10-23 07:00:00 +2010-10-24 2010-10-24 07:00:00 +2010-10-25 2010-10-25 07:00:00 +2010-10-26 2010-10-26 07:00:00 +2010-10-27 2010-10-27 07:00:00 +2010-10-28 2010-10-28 07:00:00 +2010-10-29 2010-10-29 07:00:00 +2010-10-31 2010-10-31 07:00:00 +PREHOOK: query: EXPLAIN SELECT + to_unix_timestamp(fl_time), + year(fl_time), + month(fl_time), + day(fl_time), + dayofmonth(fl_time), + weekofyear(fl_time), + date(fl_time), + to_date(fl_time), + date_add(fl_time, 2), + date_sub(fl_time, 2), + datediff(fl_time, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT + to_unix_timestamp(fl_time), + year(fl_time), + month(fl_time), + day(fl_time), + dayofmonth(fl_time), + weekofyear(fl_time), + date(fl_time), + 
to_date(fl_time), + date_add(fl_time, 2), + date_sub(fl_time, 2), + datediff(fl_time, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: date_udf_flight_orc + Select Operator + expressions: to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: string), date_add(fl_time, 2) (type: string), date_sub(fl_time, 2) (type: string), datediff(fl_time, '2000-01-01') (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + ListSink + +PREHOOK: query: SELECT + to_unix_timestamp(fl_time), + year(fl_time), + month(fl_time), + day(fl_time), + dayofmonth(fl_time), + weekofyear(fl_time), + date(fl_time), + to_date(fl_time), + date_add(fl_time, 2), + date_sub(fl_time, 2), + datediff(fl_time, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +POSTHOOK: query: SELECT + to_unix_timestamp(fl_time), + year(fl_time), + month(fl_time), + day(fl_time), + dayofmonth(fl_time), + weekofyear(fl_time), + date(fl_time), + to_date(fl_time), + date_add(fl_time, 2), + date_sub(fl_time, 2), + datediff(fl_time, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287842400 2010 10 23 23 42 2010-10-23 
2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 
+1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 
2010-10-22 2010-10-24 2010-10-20 3947 +1287756000 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288447200 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1287583200 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287669600 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287842400 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287928800 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1288015200 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288101600 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288188000 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288274400 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288360800 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288533600 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +PREHOOK: query: EXPLAIN SELECT + to_unix_timestamp(fl_date), + year(fl_date), + month(fl_date), + day(fl_date), + dayofmonth(fl_date), + weekofyear(fl_date), + date(fl_date), + to_date(fl_date), + date_add(fl_date, 2), + date_sub(fl_date, 2), + datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT + to_unix_timestamp(fl_date), + year(fl_date), + month(fl_date), + day(fl_date), + dayofmonth(fl_date), + weekofyear(fl_date), + date(fl_date), + to_date(fl_date), + date_add(fl_date, 2), + date_sub(fl_date, 2), + datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: date_udf_flight_orc + Select Operator + expressions: to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), dayofmonth(fl_date) (type: int), weekofyear(fl_date) (type: int), CAST( fl_date AS DATE) (type: date), to_date(fl_date) (type: string), date_add(fl_date, 2) (type: string), date_sub(fl_date, 2) (type: string), datediff(fl_date, '2000-01-01') (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + ListSink + +PREHOOK: query: SELECT + to_unix_timestamp(fl_date), + year(fl_date), + month(fl_date), + day(fl_date), + dayofmonth(fl_date), + weekofyear(fl_date), + date(fl_date), + to_date(fl_date), + date_add(fl_date, 2), + date_sub(fl_date, 2), + datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +POSTHOOK: query: SELECT + to_unix_timestamp(fl_date), + year(fl_date), + month(fl_date), + day(fl_date), + dayofmonth(fl_date), + 
weekofyear(fl_date), + date(fl_date), + to_date(fl_date), + date_add(fl_date, 2), + date_sub(fl_date, 2), + datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 
+1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 
2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287730800 2010 10 22 22 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1288422000 2010 10 30 30 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 +1287558000 2010 10 20 20 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 +1287644400 2010 10 21 21 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 +1287817200 2010 10 23 23 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 +1287903600 2010 10 24 24 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 +1287990000 2010 10 25 25 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 +1288076400 2010 10 26 26 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 
+1288162800 2010 10 27 27 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 +1288249200 2010 10 28 28 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 +1288335600 2010 10 29 29 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 +1288508400 2010 10 31 31 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 +PREHOOK: query: EXPLAIN SELECT + year(fl_time) = year(fl_date), + month(fl_time) = month(fl_date), + day(fl_time) = day(fl_date), + dayofmonth(fl_time) = dayofmonth(fl_date), + weekofyear(fl_time) = weekofyear(fl_date), + date(fl_time) = date(fl_date), + to_date(fl_time) = to_date(fl_date), + date_add(fl_time, 2) = date_add(fl_date, 2), + date_sub(fl_time, 2) = date_sub(fl_date, 2), + datediff(fl_time, "2000-01-01") = datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT + year(fl_time) = year(fl_date), + month(fl_time) = month(fl_date), + day(fl_time) = day(fl_date), + dayofmonth(fl_time) = dayofmonth(fl_date), + weekofyear(fl_time) = weekofyear(fl_date), + date(fl_time) = date(fl_date), + to_date(fl_time) = to_date(fl_date), + date_add(fl_time, 2) = date_add(fl_date, 2), + date_sub(fl_time, 2) = date_sub(fl_date, 2), + datediff(fl_time, "2000-01-01") = datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: date_udf_flight_orc + Select Operator + expressions: (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofmonth(fl_time) = dayofmonth(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = CAST( fl_date AS DATE)) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + ListSink + +PREHOOK: query: -- Should all be true or NULL +SELECT + year(fl_time) = year(fl_date), + month(fl_time) = month(fl_date), + day(fl_time) = day(fl_date), + dayofmonth(fl_time) = dayofmonth(fl_date), + weekofyear(fl_time) = weekofyear(fl_date), + date(fl_time) = date(fl_date), + to_date(fl_time) = to_date(fl_date), + date_add(fl_time, 2) = date_add(fl_date, 2), + date_sub(fl_time, 2) = date_sub(fl_date, 2), + datediff(fl_time, "2000-01-01") = datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +PREHOOK: type: QUERY +PREHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +POSTHOOK: query: -- Should all be true or NULL +SELECT + year(fl_time) = year(fl_date), + month(fl_time) = month(fl_date), + day(fl_time) = day(fl_date), + dayofmonth(fl_time) = dayofmonth(fl_date), + weekofyear(fl_time) = weekofyear(fl_date), + date(fl_time) = date(fl_date), + to_date(fl_time) = to_date(fl_date), + date_add(fl_time, 2) = date_add(fl_date, 2), + date_sub(fl_time, 2) = date_sub(fl_date, 2), + datediff(fl_time, "2000-01-01") = datediff(fl_date, "2000-01-01") +FROM date_udf_flight_orc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_udf_flight_orc +#### A masked pattern was here #### +true true true true true true true true true true +true true true true true true true true true true +true 
true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true 
true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +true true true true true true true true true true +PREHOOK: query: EXPLAIN SELECT + fl_date, + to_date(date_add(fl_date, 2)), + to_date(date_sub(fl_date, 2)), + datediff(fl_date, date_add(fl_date, 2)), + datediff(fl_date, date_sub(fl_date, 2)), + datediff(date_add(fl_date, 2), 
date_sub(fl_date, 2))
+FROM date_udf_flight_orc LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT
+ fl_date,
+ to_date(date_add(fl_date, 2)),
+ to_date(date_sub(fl_date, 2)),
+ datediff(fl_date, date_add(fl_date, 2)),
+ datediff(fl_date, date_sub(fl_date, 2)),
+ datediff(date_add(fl_date, 2), date_sub(fl_date, 2))
+FROM date_udf_flight_orc LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: 10
+ Processor Tree:
+ TableScan
+ alias: date_udf_flight_orc
+ Select Operator
+ expressions: fl_date (type: date), to_date(date_add(fl_date, 2)) (type: string), to_date(date_sub(fl_date, 2)) (type: string), datediff(fl_date, date_add(fl_date, 2)) (type: int), datediff(fl_date, date_sub(fl_date, 2)) (type: int), datediff(date_add(fl_date, 2), date_sub(fl_date, 2)) (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Limit
+ Number of rows: 10
+ ListSink
+
+PREHOOK: query: SELECT
+ fl_date,
+ to_date(date_add(fl_date, 2)),
+ to_date(date_sub(fl_date, 2)),
+ datediff(fl_date, date_add(fl_date, 2)),
+ datediff(fl_date, date_sub(fl_date, 2)),
+ datediff(date_add(fl_date, 2), date_sub(fl_date, 2))
+FROM date_udf_flight_orc LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+ fl_date,
+ to_date(date_add(fl_date, 2)),
+ to_date(date_sub(fl_date, 2)),
+ datediff(fl_date, date_add(fl_date, 2)),
+ datediff(fl_date, date_sub(fl_date, 2)),
+ datediff(date_add(fl_date, 2), date_sub(fl_date, 2))
+FROM date_udf_flight_orc LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-20 2010-10-22 2010-10-18 -1 2 4
+2010-10-21 2010-10-23 2010-10-19 -1 2 4
+2010-10-21 2010-10-23 2010-10-19 -1 2 4
+PREHOOK: query: -- Test extracting the date part of expression that includes time
+SELECT to_date('2009-07-30 04:17:52') FROM date_udf_flight_orc LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test extracting the date part of expression that includes time
+SELECT to_date('2009-07-30 04:17:52') FROM date_udf_flight_orc LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+2009-07-30
+PREHOOK: query: EXPLAIN SELECT
+ min(fl_date) AS c1,
+ max(fl_date),
+ count(fl_date),
+ count(*)
+FROM date_udf_flight_orc
+ORDER BY c1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT
+ min(fl_date) AS c1,
+ max(fl_date),
+ count(fl_date),
+ count(*)
+FROM date_udf_flight_orc
+ORDER BY c1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: date_udf_flight_orc
+ Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: fl_date (type: date)
+ outputColumnNames: fl_date
+ Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: min(fl_date), max(fl_date), count(fl_date), count()
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: date), _col1 (type: date), _col2 (type: bigint), _col3 (type: bigint)
+ Execution mode: vectorized
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: min(VALUE._col0), max(VALUE._col1), count(VALUE._col2), count(VALUE._col3)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: date), _col1 (type: date), _col2 (type: bigint), _col3 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: date)
+ sort order: +
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: date), _col2 (type: bigint), _col3 (type: bigint)
+ Execution mode: vectorized
+ Reducer 3
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: bigint), VALUE._col2 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: vectorized
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT
+ min(fl_date) AS c1,
+ max(fl_date),
+ count(fl_date),
+ count(*)
+FROM date_udf_flight_orc
+ORDER BY c1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+ min(fl_date) AS c1,
+ max(fl_date),
+ count(fl_date),
+ count(*)
+FROM date_udf_flight_orc
+ORDER BY c1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@date_udf_flight_orc
+#### A masked pattern was here ####
+2010-10-20 2010-10-31 137 137
diff --git ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out
new file mode 100644
index 0000000..6910325
--- /dev/null
+++ ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out
@@ -0,0 +1,157 @@
+PREHOOK: query: create table dtest(a int, b int) clustered by (a) sorted by (a) into 1 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dtest
+POSTHOOK: query: create table dtest(a int, b int) clustered by (a) sorted by (a) into 1 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dtest
+PREHOOK: query: insert into table dtest select c,b from (select array(300,300,300,300,300) as a, 1 as b from src limit 1) y lateral view explode(a) t1 as c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dtest
+POSTHOOK: query: insert into table dtest select c,b from (select array(300,300,300,300,300) as a, 1 as b from src limit 1) y lateral view explode(a) t1 as c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dtest
+POSTHOOK: Lineage: dtest.a SIMPLE []
+POSTHOOK: Lineage: dtest.b EXPRESSION []
+PREHOOK: query: explain select sum(distinct a), count(distinct a) from dtest
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select sum(distinct a), count(distinct a) from dtest
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: dtest
+ Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: a (type: int)
+ outputColumnNames: a
+ Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(DISTINCT a), count(DISTINCT a)
+ bucketGroup: true
+ keys: a (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: vectorized
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(DISTINCT KEY._col0:0._col0), count(DISTINCT KEY._col0:1._col0)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: bigint), _col1 (type: bigint)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select sum(distinct a), count(distinct a) from dtest
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dtest
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(distinct a), count(distinct a) from dtest
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dtest
+#### A masked pattern was here ####
+300 1
+PREHOOK: query: explain select sum(distinct cint), count(distinct cint), avg(distinct cint), std(distinct cint) from alltypesorc
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select sum(distinct cint), count(distinct cint), avg(distinct cint), std(distinct cint) from alltypesorc
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: alltypesorc
+ Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: cint (type: int)
+ outputColumnNames: cint
+ Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(DISTINCT cint),
count(DISTINCT cint), avg(DISTINCT cint), std(DISTINCT cint) + keys: cint (type: int) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(DISTINCT KEY._col0:0._col0), count(DISTINCT KEY._col0:1._col0), avg(DISTINCT KEY._col0:2._col0), std(DISTINCT KEY._col0:3._col0) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(distinct cint), count(distinct cint), avg(distinct cint), std(distinct cint) from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +POSTHOOK: query: select sum(distinct cint), count(distinct cint), avg(distinct cint), std(distinct cint) from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +#### A masked pattern was here #### +-3482841611 6082 -572647.4204209142 6.153814687328991E8 diff --git ql/src/test/results/clientpositive/vector_aggregate_9.q.out ql/src/test/results/clientpositive/vector_aggregate_9.q.out new file mode 100644 index 0000000..c0fe295 --- /dev/null +++ ql/src/test/results/clientpositive/vector_aggregate_9.q.out @@ -0,0 +1,167 @@ +PREHOOK: query: create table vectortab2k( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vectortab2k +POSTHOOK: query: create table vectortab2k( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vectortab2k +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vectortab2k +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vectortab2k +PREHOOK: query: create table vectortab2korc( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s 
string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vectortab2korc +POSTHOOK: query: create table vectortab2korc( + t tinyint, + si smallint, + i int, + b bigint, + f float, + d double, + dc decimal(38,18), + bo boolean, + s string, + s2 string, + ts timestamp, + ts2 timestamp, + dt date) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vectortab2korc +PREHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k +PREHOOK: type: QUERY +PREHOOK: Input: default@vectortab2k +PREHOOK: Output: default@vectortab2korc +POSTHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vectortab2k +POSTHOOK: Output: default@vectortab2korc +POSTHOOK: Lineage: vectortab2korc.b SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:b, type:bigint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.bo SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:bo, type:boolean, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.d SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:d, type:double, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.dc SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dc, type:decimal(38,18), comment:null), ] +POSTHOOK: Lineage: vectortab2korc.dt SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dt, type:date, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.f SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:f, type:float, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.i SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:i, type:int, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.s SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s, type:string, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.s2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s2, type:string, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.si SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:si, type:smallint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.t SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:t, type:tinyint, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.ts SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts, type:timestamp, comment:null), ] +POSTHOOK: Lineage: vectortab2korc.ts2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts2, type:timestamp, comment:null), ] +PREHOOK: query: explain +select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +PREHOOK: type: QUERY +POSTHOOK: query: explain +select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vectortab2korc + Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dc (type: decimal(38,18)) + outputColumnNames: dc + Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: min(dc), max(dc), sum(dc), avg(dc) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(38,18)), _col1 (type: 
decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: struct) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: min(VALUE._col0), max(VALUE._col1), sum(VALUE._col2), avg(VALUE._col3) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +PREHOOK: type: QUERY +PREHOOK: Input: default@vectortab2korc +#### A masked pattern was here #### +POSTHOOK: query: select min(dc), max(dc), sum(dc), avg(dc) from vectortab2korc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vectortab2korc +#### A masked pattern was here #### +-4997414117561.546875 4994550248722.298828 -10252745435816.024410 -5399023399.587163986308583465 diff --git ql/src/test/results/clientpositive/vector_between_in.q.out ql/src/test/results/clientpositive/vector_between_in.q.out index 631ac19..e21dd85 100644 --- ql/src/test/results/clientpositive/vector_between_in.q.out +++ ql/src/test/results/clientpositive/vector_between_in.q.out @@ -662,15 +662,15 @@ POSTHOOK: Input: default@decimal_date_test 14.9324324324 19.1135135135 20.3081081081 -22.1000000000 +22.1 24.4891891892 33.4486486486 34.6432432432 40.0189189189 42.4081081081 43.0054054054 -44.2000000000 -44.2000000000 +44.2 +44.2 44.7972972973 45.9918918919 PREHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 diff --git ql/src/test/results/clientpositive/vector_decimal_1.q.out ql/src/test/results/clientpositive/vector_decimal_1.q.out new file mode 100644 index 0000000..7d0ff75 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_1.q.out @@ -0,0 +1,528 @@ +PREHOOK: query: drop table if exists decimal_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists decimal_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_1 +PREHOOK: query: desc decimal_1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@decimal_1 +POSTHOOK: query: desc decimal_1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@decimal_1 +t decimal(4,2) +u decimal(5,0) +v decimal(10,0) +PREHOOK: query: insert overwrite table decimal_1 + select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: insert overwrite table decimal_1 + select 
cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_1 +POSTHOOK: Lineage: decimal_1.t EXPRESSION [] +POSTHOOK: Lineage: decimal_1.u EXPRESSION [] +POSTHOOK: Lineage: decimal_1.v EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as tinyint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select 
cast(t as tinyint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as tinyint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as smallint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as smallint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToShort(t) (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as smallint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as smallint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as int) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as int) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToInteger(t) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as int) from 
decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as int) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as bigint) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as bigint) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToLong(t) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as bigint) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as bigint) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as float) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as float) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToFloat(t) (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: float) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as float) from decimal_1 order by t +PREHOOK: type: 
QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as float) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as double) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as double) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToDouble(t) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as double) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as double) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as string) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as string) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToString(t) (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as string) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: 
Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as string) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as timestamp) from decimal_1 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as timestamp) from decimal_1 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_1 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: CAST( t AS TIMESTAMP) (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as timestamp) from decimal_1 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as timestamp) from decimal_1 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_1 +#### A masked pattern was here #### +1969-12-31 16:00:17.29 +PREHOOK: query: drop table decimal_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_1 +PREHOOK: Output: default@decimal_1 +POSTHOOK: query: drop table decimal_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_1 +POSTHOOK: Output: default@decimal_1 diff --git ql/src/test/results/clientpositive/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/vector_decimal_10_0.q.out new file mode 100644 index 0000000..1fb0e30 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_10_0.q.out @@ -0,0 +1,105 @@ +PREHOOK: query: DROP TABLE IF EXISTS decimal_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS decimal_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS decimal +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS decimal +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE decimal_txt (dec decimal) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: CREATE TABLE decimal_txt (dec decimal) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +POSTHOOK: type: LOAD 
+#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: CREATE TABLE DECIMAL STORED AS ORC AS SELECT * FROM decimal_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL +POSTHOOK: query: CREATE TABLE DECIMAL STORED AS ORC AS SELECT * FROM decimal_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL +PREHOOK: query: EXPLAIN +SELECT dec FROM DECIMAL order by dec +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT dec FROM DECIMAL order by dec +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal + Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dec (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + sort order: + + Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT dec FROM DECIMAL order by dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec FROM DECIMAL order by dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal +#### A masked pattern was here #### +NULL +1000000000 +PREHOOK: query: DROP TABLE DECIMAL_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: DROP TABLE DECIMAL_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_txt +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: DROP TABLE DECIMAL +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal +PREHOOK: Output: default@decimal +POSTHOOK: query: DROP TABLE DECIMAL +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal +POSTHOOK: Output: default@decimal diff --git ql/src/test/results/clientpositive/vector_decimal_2.q.out ql/src/test/results/clientpositive/vector_decimal_2.q.out new file mode 100644 index 0000000..d273810 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_2.q.out @@ -0,0 +1,1487 @@ +PREHOOK: query: drop table decimal_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table decimal_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table decimal_2 (t decimal(18,9)) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: create table decimal_2 (t decimal(18,9)) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_2 
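[Editorial note, not part of the patch: the cast results recorded in vector_decimal_1.q.out above — and repeated below for the wider decimal(18,9) column — follow ordinary BigDecimal semantics: integral casts truncate toward zero (17), float/double/string keep the fraction (17.29), boolean is true for any non-zero value, and the timestamp cast treats the decimal as seconds since the epoch. A minimal standalone sketch, assuming the test suite's US/Pacific timezone and using plain java.math/java.sql in place of Hive's vectorized cast expressions:

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.TimeZone;

public class DecimalCastSketch {
    public static void main(String[] args) {
        // The q.out files here are generated with the Hive test suite's
        // US/Pacific timezone; pin it so the timestamp line matches.
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));

        BigDecimal t = new BigDecimal("17.29");

        // boolean: any non-zero decimal is true.
        System.out.println(t.signum() != 0);           // true
        // tinyint/smallint/int/bigint: truncate toward zero.
        System.out.println(t.longValue());             // 17
        // float/double/string: the fraction survives.
        System.out.println(t.doubleValue());           // 17.29
        System.out.println(t.toPlainString());         // 17.29
        // timestamp: the decimal is seconds since the epoch, so 17.29 s
        // renders as 1969-12-31 16:00:17.29 in US/Pacific.
        System.out.println(new Timestamp(t.movePointRight(3).longValue()));
    }
}

The same arithmetic explains the decimal_2 results that follow: 3404045.5044003 truncates to 3404045 for int/bigint, while the tinyint (13) and smallint (-3827) values are the usual Java narrowing of that long, i.e. wraparound of the low-order bits.]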
+PREHOOK: query: insert overwrite table decimal_2 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: insert overwrite table decimal_2 + select cast('17.29' as decimal(4,2)) from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_2 +POSTHOOK: Lineage: decimal_2.t EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToShort(t) (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as smallint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as smallint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToInteger(t) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as int) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as int) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToLong(t) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as bigint) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as bigint) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17 +PREHOOK: query: explain +select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToFloat(t) (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: float) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: float) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as float) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as float) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToDouble(t) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as double) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as double) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: explain +select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToString(t) (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as string) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as string) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +17.29 +PREHOOK: query: insert overwrite table decimal_2 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@decimal_2 +POSTHOOK: query: insert overwrite table decimal_2 + select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@decimal_2 +POSTHOOK: Lineage: decimal_2.t EXPRESSION [] +PREHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToBoolean(t) (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: boolean) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: boolean) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select cast(t as boolean) from decimal_2 order by t +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +POSTHOOK: query: select cast(t as boolean) from decimal_2 order by t +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_2 +#### A masked pattern was here #### +true +PREHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +PREHOOK: type: QUERY +POSTHOOK: query: explain +select cast(t as tinyint) from decimal_2 order by t +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_2 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: UDFToByte(t) (type: tinyint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: tinyint) + sort order: + + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + 
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: tinyint)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as tinyint) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as tinyint) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+13
+PREHOOK: query: explain
+select cast(t as smallint) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as smallint) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToShort(t) (type: smallint)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: smallint)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: smallint)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as smallint) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as smallint) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+-3827
+PREHOOK: query: explain
+select cast(t as int) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as int) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToInteger(t) (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: int)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: int)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as int) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as int) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3404045
+PREHOOK: query: explain
+select cast(t as bigint) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as bigint) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToLong(t) (type: bigint)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: bigint)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: bigint)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as bigint) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as bigint) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3404045
+PREHOOK: query: explain
+select cast(t as float) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as float) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToFloat(t) (type: float)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: float)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: float)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as float) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as float) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3404045.5
+PREHOOK: query: explain
+select cast(t as double) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as double) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToDouble(t) (type: double)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as double) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as double) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3404045.5044003
+PREHOOK: query: explain
+select cast(t as string) from decimal_2 order by t
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(t as string) from decimal_2 order by t
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToString(t) (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: string)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: string)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(t as string) from decimal_2 order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from decimal_2 order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3404045.5044003
+PREHOOK: query: explain
+select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3.14 AS decimal(4,2)) (type: decimal(4,2))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(4,2))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(4,2))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3.14
+PREHOOK: query: explain
+select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3.14 AS decimal(4,2)) (type: decimal(4,2))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(4,2))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(4,2))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(cast(3.14 as float) as decimal(4,2)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3.14
+PREHOOK: query: explain
+select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 2012-12-19 11:12:19.1234567 AS decimal(30,8)) (type: decimal(30,8))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(30,8))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(30,8))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+1355944339.1234567
+PREHOOK: query: explain
+select cast(true as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(true as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( true AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select cast(true as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(true as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( true AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(true as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(true as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+1
+PREHOOK: query: explain
+select cast(3Y as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(3Y as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(3Y as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(3Y as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3
+PREHOOK: query: explain
+select cast(3S as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(3S as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(3S as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(3S as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3
+PREHOOK: query: explain
+select cast(cast(3 as int) as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(cast(3 as int) as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(cast(3 as int) as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(cast(3 as int) as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3
+PREHOOK: query: explain
+select cast(3L as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(3L as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 3 AS decimal(10,0)) (type: decimal(10,0))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(10,0))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(10,0))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(3L as decimal) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(3L as decimal) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+3
+PREHOOK: query: explain
+select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( 1.0 AS decimal(20,19)) (type: decimal(20,19))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(20,19))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(20,19))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(0.99999999999999999999 as decimal(20,19)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+1.0
+PREHOOK: query: explain
+select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_2
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: CAST( '0.99999999999999999999' AS decimal(20,20)) (type: decimal(20,20))
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: decimal(20,20))
+                sort order: +
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(20,20))
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) as c from decimal_2 order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_2
+#### A masked pattern was here ####
+0.99999999999999999999
+PREHOOK: query: drop table decimal_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_2
+PREHOOK: Output: default@decimal_2
+POSTHOOK: query: drop table decimal_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_2
+POSTHOOK: Output: default@decimal_2
diff --git ql/src/test/results/clientpositive/vector_decimal_3.q.out ql/src/test/results/clientpositive/vector_decimal_3.q.out
new file mode 100644
index 0000000..e982e1b
--- /dev/null
+++ ql/src/test/results/clientpositive/vector_decimal_3.q.out
@@ -0,0 +1,374 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_3_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_3_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_3_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_3_txt
+PREHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@decimal_3_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_3
+POSTHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@decimal_3_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_3
+PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+NULL 0
+-1234567890.1234567890 -1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -1
+-1.12 -1
+-0.333 0
+-0.33 0
+-0.3 0
+0.000000000000000000 0
+0 0
+0 0
+0.01 0
+0.02 0
+0.1 0
+0.2 0
+0.3 0
+0.33 0
+0.333 0
+1 1
+1.0 1
+1.000000000000000000 1
+1.12 1
+1.122 1
+2 2
+2 2
+3.14 3
+3.14 3
+3.14 3
+3.140 4
+10 10
+20 20
+100 100
+124.00 124
+125.2 125
+200 200
+1234567890.1234567800 1234567890
+PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+1234567890.1234567800 1234567890
+200 200
+125.2 125
+124.00 124
+100 100
+20 20
+10 10
+3.140 4
+3.14 3
+3.14 3
+3.14 3
+2 2
+2 2
+1.122 1
+1.12 1
+1.000000000000000000 1
+1.0 1
+1 1
+0.333 0
+0.33 0
+0.3 0
+0.2 0
+0.1 0
+0.02 0
+0.01 0
+0 0
+0 0
+0.000000000000000000 0
+-0.3 0
+-0.33 0
+-0.333 0
+-1.12 -1
+-1.12 -1
+-1.122 -11
+-1255.49 -1255
+-4400 4400
+-1234567890.1234567890 -1234567890
+NULL 0
+PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+NULL 0
+-1234567890.1234567890 -1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -1
+-1.12 -1
+-0.333 0
+-0.33 0
+-0.3 0
+0.000000000000000000 0
+0 0
+0 0
+0.01 0
+0.02 0
+0.1 0
+0.2 0
+0.3 0
+0.33 0
+0.333 0
+1 1
+1.0 1
+1.000000000000000000 1
+1.12 1
+1.122 1
+2 2
+2 2
+3.14 3
+3.14 3
+3.14 3
+3.140 4
+10 10
+20 20
+100 100
+124.00 124
+125.2 125
+200 200
+1234567890.1234567800 1234567890
+PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+NULL
+-1234567890.1234567890
+-4400
+-1255.49
+-1.122
+-1.12
+-0.333
+-0.33
+-0.3
+0.000000000000000000
+0.01
+0.02
+0.1
+0.2
+0.3
+0.33
+0.333
+1
+1.12
+1.122
+2
+3.14
+10
+20
+100
+124.00
+125.2
+200
+1234567890.1234567800
+PREHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+NULL 0
+-1234567890.1234567890 -1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -2
+-0.333 0
+-0.33 0
+-0.3 0
+0.000000000000000000 0
+0.01 0
+0.02 0
+0.1 0
+0.2 0
+0.3 0
+0.33 0
+0.333 0
+1 3
+1.12 1
+1.122 1
+2 4
+3.14 13
+10 10
+20 20
+100 100
+124.00 124
+125.2 125
+200 200
+1234567890.1234567800 1234567890
+PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+-1234567890 -1234567890.1234567890
+-1255 -1255.49
+-11 -1.122
+-1 -2.24
+0 0.330000000000000000
+1 5.242000000000000000
+2 4
+3 9.42
+4 3.140
+10 10
+20 20
+100 100
+124 124.00
+125 125.2
+200 200
+4400 -4400
+1234567890 1234567890.1234567800
+PREHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+-1234567890.1234567890 -1234567890 -1234567890.1234567890 -1234567890
+-4400 4400 -4400 4400
+-1255.49 -1255 -1255.49 -1255
+-1.122 -11 -1.122 -11
+-1.12 -1 -1.12 -1
+-1.12 -1 -1.12 -1
+-1.12 -1 -1.12 -1
+-1.12 -1 -1.12 -1
+-0.333 0 -0.333 0
+-0.33 0 -0.33 0
+-0.3 0 -0.3 0
+0.000000000000000000 0 0.000000000000000000 0
+0 0 0 0
+0 0 0 0
+0 0 0 0
+0 0 0 0
+0.01 0 0.01 0
+0.02 0 0.02 0
+0.1 0 0.1 0
+0.2 0 0.2 0
+0.3 0 0.3 0
+0.33 0 0.33 0
+0.333 0 0.333 0
+1 1 1 1
+1.0 1 1.0 1
+1.000000000000000000 1 1.000000000000000000 1
+1.12 1 1.12 1
+1.122 1 1.122 1
+2 2 2 2
+2 2 2 2
+2 2 2 2
+2 2 2 2
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.14 3 3.14 3
+3.140 4 3.140 4
+10 10 10 10
+20 20 20 20
+100 100 100 100
+124.00 124 124.00 124
+125.2 125 125.2 125
+200 200 200 200
+1234567890.1234567800 1234567890 1234567890.1234567800 1234567890
+PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.14 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.14 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+3.14 3
+3.14 3
+3.14 3
+3.140 4
+PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.140 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.140 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+3.14 3
+3.14 3
+3.14 3
+3.140 4
+PREHOOK: query: DROP TABLE DECIMAL_3_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_3_txt
+PREHOOK: Output: default@decimal_3_txt
+POSTHOOK: query: DROP TABLE DECIMAL_3_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_3_txt
+POSTHOOK: Output: default@decimal_3_txt
+PREHOOK: query: DROP TABLE DECIMAL_3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_3
+PREHOOK: Output: default@decimal_3
+POSTHOOK: query: DROP TABLE DECIMAL_3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_3
+POSTHOOK: Output: default@decimal_3
diff --git ql/src/test/results/clientpositive/vector_decimal_4.q.out ql/src/test/results/clientpositive/vector_decimal_4.q.out
new file mode 100644
index 0000000..483ae1f
--- /dev/null
+++ ql/src/test/results/clientpositive/vector_decimal_4.q.out
@@ -0,0 +1,250 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_4_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_4_1
+POSTHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_4_1
+PREHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25))
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_4_2
+POSTHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25))
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_4_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_4_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_4_1
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_4_1
+PREHOOK: Output: default@decimal_4_2
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_4_1
+POSTHOOK: Output: default@decimal_4_2
+POSTHOOK: Lineage: decimal_4_2.key SIMPLE [(decimal_4_1)decimal_4_1.FieldSchema(name:key, type:decimal(35,25), comment:null), ]
+POSTHOOK: Lineage: decimal_4_2.value EXPRESSION [(decimal_4_1)decimal_4_1.FieldSchema(name:key, type:decimal(35,25), comment:null), ]
+PREHOOK: query: SELECT * FROM DECIMAL_4_1 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_4_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_4_1 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_4_1
+#### A masked pattern was here ####
+NULL 0
+-1234567890.1234567890 -1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -1
+-1.12 -1
+-0.333 0
+-0.33 0
+-0.3 0
+0.0000000000000000000000000 0
+0 0
+0 0
+0.01 0
+0.02 0
+0.1 0
+0.2 0
+0.3 0
+0.33 0
+0.333 0
+0.9999999999999999999999999 1
+1 1
+1.0 1
+1.12 1
+1.122 1
+2 2
+2 2
+3.14 3
+3.14 3
+3.14 3
+3.140 4
+10 10
+20 20
+100 100
+124.00 124
+125.2 125
+200 200
+1234567890.1234567800 1234567890
+PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+NULL NULL
+-1234567890.1234567890 -3703703670.3703703670
+-4400 -13200
+-1255.49 -3766.47
+-1.122 -3.366
+-1.12 -3.36
+-1.12 -3.36
+-0.333 -0.999
+-0.33 -0.99
+-0.3 -0.9
+0.0000000000000000000000000 0.0000000000000000000000000
+0 0
+0 0
+0.01 0.03
+0.02 0.06
+0.1 0.3
+0.2 0.6
+0.3 0.9
+0.33 0.99
+0.333 0.999
+0.9999999999999999999999999 2.9999999999999999999999997
+1 3
+1.0 3.0
+1.12 3.36
+1.122 3.366
+2 6
+2 6
+3.14 9.42
+3.14 9.42
+3.14 9.42
+3.140 9.420
+10 30
+20 60
+100 300
+124.00 372.00
+125.2 375.6
+200 600
+1234567890.1234567800 3703703670.3703703400
+PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+NULL NULL
+-1234567890.1234567890 -3703703670.3703703670
+-4400 -13200
+-1255.49 -3766.47
+-1.122 -3.366
+-1.12 -3.36
+-1.12 -3.36
+-0.333 -0.999
+-0.33 -0.99
+-0.3 -0.9
+0.0000000000000000000000000 0.0000000000000000000000000
+0 0
+0 0
+0.01 0.03
+0.02 0.06
+0.1 0.3
+0.2 0.6
+0.3 0.9
+0.33 0.99
+0.333 0.999
+0.9999999999999999999999999 2.9999999999999999999999997
+1 3
+1.0 3.0
+1.12 3.36
+1.122 3.366
+2 6
+2 6
+3.14 9.42
+3.14 9.42
+3.14 9.42
+3.140 9.420
+10 30
+20 60
+100 300
+124.00 372.00
+125.2 375.6
+200 600
+1234567890.1234567800 3703703670.3703703400
+PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_4_2
+#### A masked pattern was here ####
+NULL NULL
+-1234567890.1234567890 -3703703670.3703703670
+-4400 -13200
+-1255.49 -3766.47
+-1.122 -3.366
+-1.12 -3.36
+-1.12 -3.36
+-0.333 -0.999
+-0.33 -0.99
+-0.3 -0.9
+0.0000000000000000000000000 0.0000000000000000000000000
+0 0
+0 0
+0.01 0.03
+0.02 0.06
+0.1 0.3
+0.2 0.6
+0.3 0.9
+0.33 0.99
+0.333 0.999
+0.9999999999999999999999999 2.9999999999999999999999997
+1 3
+1.0 3.0
+1.12 3.36
+1.122 3.366
+2 6
+2 6
+3.14 9.42
+3.14 9.42
+3.14 9.42
+3.140 9.420
+10 30
+20 60
+100 300
+124.00 372.00
+125.2 375.6
+200 600
+1234567890.1234567800 3703703670.3703703400
+PREHOOK: query: DROP TABLE DECIMAL_4_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_4_1
+PREHOOK: Output: default@decimal_4_1
+POSTHOOK: query: DROP TABLE DECIMAL_4_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_4_1
+POSTHOOK: Output: default@decimal_4_1
+PREHOOK: query: DROP TABLE DECIMAL_4_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_4_2
+PREHOOK: Output: default@decimal_4_2
+POSTHOOK: query: DROP TABLE DECIMAL_4_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_4_2
+POSTHOOK: Output: default@decimal_4_2
diff --git ql/src/test/results/clientpositive/vector_decimal_5.q.out ql/src/test/results/clientpositive/vector_decimal_5.q.out
new file mode 100644
index 0000000..01b5f42
--- /dev/null
+++ ql/src/test/results/clientpositive/vector_decimal_5.q.out
@@ -0,0 +1,239 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_5_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_5_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_5
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_5
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_5_txt(key decimal(10,5), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_5_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_5_txt(key decimal(10,5), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_5_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_5_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_5_txt
+PREHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_5
+POSTHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_5
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_5 SELECT * FROM DECIMAL_5_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_5_txt
+PREHOOK: Output: default@decimal_5
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_5 SELECT * FROM DECIMAL_5_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_5_txt
+POSTHOOK: Output: default@decimal_5
+POSTHOOK: Lineage: decimal_5.key SIMPLE [(decimal_5_txt)decimal_5_txt.FieldSchema(name:key, type:decimal(10,5), comment:null), ]
+POSTHOOK: Lineage: decimal_5.value SIMPLE [(decimal_5_txt)decimal_5_txt.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+-4400
+-1255.49
+-1.122
+-1.12
+-1.12
+-0.333
+-0.33
+-0.3
+0.00000
+0
+0
+0.01
+0.02
+0.1
+0.2
+0.3
+0.33
+0.333
+1
+1.0
+1.00000
+1.12
+1.122
+2
+2
+3.14
+3.14
+3.14
+3.140
+10
+20
+100
+124.00
+125.2
+200
+PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+NULL
+-4400
+-1255.49
+-1.122
+-1.12
+-0.333
+-0.33
+-0.3
+0.00000
+0.01
+0.02
+0.1
+0.2
+0.3
+0.33
+0.333
+1
+1.12
+1.122
+2
+3.14
+10
+20
+100
+124.00
+125.2
+200
+PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+-4400
+NULL
+0
+0
+100
+10
+1
+0
+0
+200
+20
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+3
+-1
+-1
+-1
+1
+1
+124
+125
+-1255
+3
+3
+3
+1
+NULL
+NULL
+PREHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cast(key as decimal(6,3)) FROM DECIMAL_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_5
+#### A masked pattern was here ####
+NULL
+NULL
+0.000
+0
+100
+10
+1
+0.1
+0.01
+200
+20
+2
+0
+0.2
+0.02
+0.3
+0.33
+0.333
+-0.3
+-0.33
+-0.333
+1.0
+2
+3.14
+-1.12
+-1.12
+-1.122
+1.12
+1.122
+124.00
+125.2
+NULL
+3.14
+3.14
+3.140
+1.000
+NULL
+NULL
+PREHOOK: query: DROP TABLE DECIMAL_5_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_5_txt
+PREHOOK: Output: default@decimal_5_txt
+POSTHOOK: query: DROP TABLE DECIMAL_5_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_5_txt
+POSTHOOK: Output: default@decimal_5_txt
+PREHOOK: query: DROP TABLE DECIMAL_5
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_5
+PREHOOK: Output: default@decimal_5
+POSTHOOK: query: DROP TABLE DECIMAL_5
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_5
+POSTHOOK: Output: default@decimal_5
diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out
new file mode 100644
index 0000000..7ecd500
--- /dev/null
+++ ql/src/test/results/clientpositive/vector_decimal_6.q.out
@@ -0,0 +1,303 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_6_1_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_6_1_txt
+PREHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_6_2_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_6_2_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_6_1_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_6_1_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_6_2_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_6_2_txt
+PREHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_6_1
+POSTHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_6_1
+PREHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_6_2
+POSTHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_6_2
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_1_txt
+PREHOOK: Output: default@decimal_6_1
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_1_txt
+POSTHOOK: Output: default@decimal_6_1
+POSTHOOK: Lineage: decimal_6_1.key SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:key, type:decimal(10,5), comment:null), ]
+POSTHOOK: Lineage: decimal_6_1.value SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_2_txt
+PREHOOK: Output: default@decimal_6_2
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_2_txt
+POSTHOOK: Output: default@decimal_6_2
+POSTHOOK: Lineage: decimal_6_2.key SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:key, type:decimal(17,4), comment:null), ]
+POSTHOOK: Lineage: decimal_6_2.value SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_1
+#### A masked pattern was here ####
+NULL -1234567890
+NULL 0
+NULL 3
+NULL 4
+NULL 1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -1
+-0.333 0
+-0.3 0
+0.00000 0
+0 0
+0.333 0
+1.0 1
+1.00000 1
+1.12 1
+1.122 1
+2 2
+3.14 3
+3.14 3
+3.140 4
+10 10
+10.73433 5
+124.00 124
+125.2 125
+23232.23435 2
+PREHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_2
+#### A masked pattern was here ####
+NULL 0
+-1234567890.1235 -1234567890
+-4400 4400
+-1255.49 -1255
+-1.122 -11
+-1.12 -1
+-0.333 0
+-0.3 0
+0.0000 0
+0 0
+0.333 0
+1.0 1
+1.0000 1
+1.12 1
+1.122 1
+2 2
+3.14 3
+3.14 3
+3.140 4
+10 10
+10.7343 5
+124.00 124
+125.2 125
+23232.2344 2
+2389432.2375 3
+2389432.2375 4
+1234567890.1235 1234567890
+PREHOOK: query: SELECT T.key from (
+  SELECT key, value from DECIMAL_6_1
+  UNION ALL
+  SELECT key, value from DECIMAL_6_2
+) T order by T.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_1
+PREHOOK: Input: default@decimal_6_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT T.key from (
+  SELECT key, value from DECIMAL_6_1
+  UNION ALL
+  SELECT key, value from DECIMAL_6_2
+) T order by T.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_1
+POSTHOOK: Input: default@decimal_6_2
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+-1234567890.1235
+-4400
+-4400
+-1255.49
+-1255.49
+-1.122
+-1.122
+-1.12
+-1.12
+-0.333
+-0.333
+-0.3
+-0.3
+0.00000
+0.0000
+0
+0
+0.333
+0.333
+1.0
+1.0
+1.0000
+1.00000
+1.12
+1.12
+1.122
+1.122
+2
+2
+3.14
+3.14
+3.14
+3.14
+3.140
+3.140
+10
+10
+10.7343
+10.73433
+124.00
+124.00
+125.2
+125.2
+23232.23435
+23232.2344
+2389432.2375
+2389432.2375
+1234567890.1235
+PREHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@decimal_6_1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_6_3
+POSTHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@decimal_6_1
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_6_3
+PREHOOK: query: desc DECIMAL_6_3
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@decimal_6_3
+POSTHOOK: query: desc DECIMAL_6_3
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@decimal_6_3
+k double
+v int
+PREHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_6_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_6_3
+#### A masked pattern was here ####
+NULL -695344902
+NULL 0
+NULL 33
+NULL 44
+NULL 695344902
+-4394.5 48400
+-1249.99 -13805
+4.378 -121
+4.38 -11
+5.167 0
+5.2 0
+5.5 0
+5.5 0
+5.833 0
+6.5 11
+6.5 11
+6.62 11
+6.622 11
+7.5 22
+8.64 33
+8.64 33
+8.64 44
+15.5 110
+16.23433 55
+129.5 1364
+130.7 1375
+23237.73435 22
diff --git ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index c26fc9d..7f2cd53
--- ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -106,14 +106,14 @@ POSTHOOK: query: SELECT cint,
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_vgby
 #### A masked pattern was here ####
-NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 3072 11160.71538461538500 -5147.90769230769300 6010604.30769230735360
--3728 6 5831542.2692483780 -3367.6517567568 5817556.0411483778 6 6984454.21109769200000 -4033.445769230769 6967702.86724384584710
--563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 2 -617.56077692307690 -4033.445769230769 -4651.00654615384590
-762 2 5831542.2692483780 1531.2194054054 5833073.4886537834 2 6984454.21109769200000 1833.9456923076925 6986288.15678999969250
-6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 3 6984454.211097692 -617.56077692307690 6983219.08954384584620
-253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 1024 11697.96923076923100 -11712.99230769231000 -416182.64030769233089
-528534767 1024 5831542.2692483780 -9777.1594594595 11646372.8607481068 1024 6984454.21109769200000 -11710.13076923077100 13948892.79980307629003
-626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 1024 11645.74615384615400 -11712.27692307692300 12625.04759999997746
+NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 3072 11160.715384615385 -5147.907692307693 6010604.3076923073536
+-3728 6 5831542.269248378 -3367.6517567568 5817556.0411483778 6 6984454.211097692 -4033.445769230769 6967702.8672438458471
+-563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 2 -617.5607769230769 -4033.445769230769 -4651.0065461538459
+762 2 5831542.269248378 1531.2194054054 5833073.4886537834 2 6984454.211097692 1833.9456923076925 6986288.1567899996925
+6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 3 6984454.211097692 -617.5607769230769 6983219.0895438458462
+253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 1024 11697.969230769231 -11712.99230769231 -416182.64030769233089
+528534767 1024 5831542.269248378 -9777.1594594595 11646372.8607481068 1024 6984454.211097692 -11710.130769230771 13948892.79980307629003
+626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 1024 11645.746153846154 -11712.276923076923 12625.04759999997746
 PREHOOK: query: -- Now add the others...
EXPLAIN SELECT cint, COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1), @@ -204,11 +204,11 @@ POSTHOOK: query: SELECT cint, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_vgby #### A masked pattern was here #### -NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 1633.60810810806667 5695.483082135364 5696.4103077145055 3072 11160.71538461538500 -5147.90769230769300 6010604.30769230735360 1956.576923076922966667 6821.495748565159 6822.606289190924 --3728 6 5831542.2692483780 -3367.6517567568 5817556.0411483778 969592.67352472963333 2174330.2092403853 2381859.406131774 6 6984454.21109769200000 -4033.445769230769 6967702.86724384584710 1161283.811207307641183333 2604201.2704476737 2852759.5602156054 --563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 -1941.6364148649 1426.0153418918999 2016.6902366556308 2 -617.56077692307690 -4033.445769230769 -4651.00654615384590 -2325.50327307692295 1707.9424961538462 2415.395441814127 -762 2 5831542.2692483780 1531.2194054054 5833073.4886537834 2916536.7443268917 2915005.5249214866 4122440.3477364695 2 6984454.21109769200000 1833.9456923076925 6986288.15678999969250 3493144.07839499984625 3491310.1327026924 4937458.140118758 -6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 1943503.67570081066667 2749258.455012492 3367140.1929065133 3 6984454.211097692 -617.56077692307690 6983219.08954384584620 2327739.696514615282066667 3292794.4113115156 4032833.0678006653 -253665376 1024 9767.0054054054 -9779.5486486487 -347484.0818378374 -339.33992366976309 5708.9563478862 5711.745967572779 1024 11697.96923076923100 -11712.99230769231000 -416182.64030769233089 -406.428359675480791885 6837.632716002934 6840.973851172274 -528534767 1024 5831542.2692483780 -9777.1594594595 11646372.8607481068 11373.41099682432305 257528.92988206653 257654.7686043977 1024 6984454.21109769200000 -11710.13076923077100 13948892.79980307629003 13621.965624807691689482 308443.1074570801 308593.82484083984 -626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 10.29399661106318 5742.09145323734 5744.897264034267 1024 11645.74615384615400 -11712.27692307692300 12625.04759999997746 12.329148046874977988 6877.318722794877 6880.679250101603 +NULL 3072 9318.4351351351 -4298.1513513514 5018444.1081079808 1633.60810810806667 5695.483082135364 5696.4103077145055 3072 11160.715384615385 -5147.907692307693 6010604.3076923073536 1956.576923076922966667 6821.495748565159 6822.606289190924 +-3728 6 5831542.269248378 -3367.6517567568 5817556.0411483778 969592.67352472963333 2174330.2092403853 2381859.406131774 6 6984454.211097692 -4033.445769230769 6967702.8672438458471 1161283.811207307641183333 2604201.2704476737 2852759.5602156054 +-563 2 -515.6210729730 -3367.6517567568 -3883.2728297298 -1941.6364148649 1426.0153418918999 2016.6902366556308 2 -617.5607769230769 -4033.445769230769 -4651.0065461538459 -2325.50327307692295 1707.9424961538462 2415.395441814127 +762 2 5831542.269248378 1531.2194054054 5833073.4886537834 2916536.7443268917 2915005.5249214866 4122440.3477364695 2 6984454.211097692 1833.9456923076925 6986288.1567899996925 3493144.07839499984625 3491310.1327026924 4937458.140118758 +6981 3 5831542.269248378 -515.6210729730 5830511.0271024320 1943503.67570081066667 2749258.455012492 3367140.1929065133 3 6984454.211097692 -617.5607769230769 6983219.0895438458462 2327739.696514615282066667 3292794.4113115156 4032833.0678006653 +253665376 1024 9767.0054054054 -9779.5486486487 
-347484.0818378374 -339.33992366976309 5708.9563478862 5711.745967572779 1024 11697.969230769231 -11712.99230769231 -416182.64030769233089 -406.428359675480791885 6837.632716002934 6840.973851172274 +528534767 1024 5831542.269248378 -9777.1594594595 11646372.8607481068 11373.41099682432305 257528.92988206653 257654.7686043977 1024 6984454.211097692 -11710.130769230771 13948892.79980307629003 13621.965624807691689482 308443.1074570801 308593.82484083984 +626923679 1024 9723.4027027027 -9778.9513513514 10541.0525297287 10.29399661106318 5742.09145323734 5744.897264034267 1024 11645.746153846154 -11712.276923076923 12625.04759999997746 12.329148046874977988 6877.318722794877 6880.679250101603 diff --git ql/src/test/results/clientpositive/vector_decimal_cast.q.out ql/src/test/results/clientpositive/vector_decimal_cast.q.out index 83ea4cc..d489046 100644 --- ql/src/test/results/clientpositive/vector_decimal_cast.q.out +++ ql/src/test/results/clientpositive/vector_decimal_cast.q.out @@ -46,13 +46,13 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### --13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0000000000 528534767.00000000000000 1.00 -13 --15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0000000000 528534767.00000000000000 1.00 -4 --9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0000000000 528534767.00000000000000 1.00 -16 -15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0000000000 528534767.00000000000000 1.00 -10 -7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0000000000 528534767.00000000000000 1.00 15 -4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0000000000 528534767.00000000000000 1.00 7 --7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0000000000 528534767.00000000000000 1.00 5 --15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0000000000 528534767.00000000000000 1.00 -8 --15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0000000000 528534767.00000000000000 1.00 -15 -5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0000000000 528534767.00000000000000 1.00 -16 +-13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0 528534767 1 -13 +-15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0 528534767 1 -4 +-9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0 528534767 1 -16 +15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0 528534767 1 -10 +7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0 528534767 1 15 +4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0 528534767 1 7 +-7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0 528534767 1 5 +-15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0 528534767 1 -8 +-15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0 528534767 1 -15 +5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0 528534767 1 -16 diff --git ql/src/test/results/clientpositive/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/vector_decimal_expressions.q.out index ca94561..2c15b9a 100644 --- ql/src/test/results/clientpositive/vector_decimal_expressions.q.out +++ ql/src/test/results/clientpositive/vector_decimal_expressions.q.out @@ -56,13 +56,13 @@ POSTHOOK: query: SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdec POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_test #### A masked pattern was here #### -19699.41746361742300 -12507.91330561334600 0.8351496686995997 2.8303425077026896E7 3.6405405405 8963 10735 -17 8963 true 10735.776923076923 
8963.641 10735.776923076923 1969-12-31 18:29:23.64054054 -9216.33970893968500 -5851.80644490647000 0.8353975893550668 6195112.1797296945 3.6243243243 4193 5022 -98 4193 true 5022.715384615385 4193.6245 5022.715384615385 1969-12-31 17:09:53.624324324 -6514.84033264034640 -4136.52120582119280 0.8355907765708067 3095563.9418919063 4.3864864865 2964 3550 -34 2964 true 3550.4538461538464 2964.3865 3550.4538461538464 1969-12-31 16:49:24.386486486 -7587.30145530147700 -4817.46777546775400 0.8354976172734904 4198623.24324327 2.3783783784 3452 4134 38 3452 true 4134.923076923077 3452.3784 4134.923076923077 1969-12-31 16:57:32.378378378 -19197.97297297300000 -12189.52702702700000 0.835155361813429 2.6880848817567654E7 5.4729729730 8735 10462 -34 8735 true 10462.5 8735.473 10462.5 1969-12-31 18:25:35.472972973 -17098.99459459460000 -10856.80540540540000 0.8351828165813104 2.132423090270272E7 0.3945945946 7780 9318 102 7780 true 9318.6 7780.3945 9318.6 1969-12-31 18:09:40.394594594 -12433.72307692307700 -7894.64615384615400 0.8352770361086894 1.12754688E7 7.6000000000 5657 6776 120 5657 true 6776.123076923077 5657.6 6776.123076923077 1969-12-31 17:34:17.6 -7247.31683991686200 -4601.59854469852400 0.8355241651897876 3830775.6932432684 7.6783783784 3297 3949 109 3297 true 3949.638461538462 3297.6785 3949.638461538462 1969-12-31 16:54:57.678378378 -14757.17006237004650 -9369.89147609149300 0.8352226654922171 1.5883214124324286E7 4.8162162162 6714 8042 106 6714 true 8042.3538461538465 6714.8164 8042.3538461538465 1969-12-31 17:51:54.816216216 -10964.83201663199300 -6961.99106029108600 0.8353232978714221 8768719.779729689 9.2243243243 4989 5975 87 4989 true 5975.607692307693 4989.224 5975.607692307693 1969-12-31 17:23:09.224324324 +19699.417463617423 -12507.913305613346 0.8351496686995997 2.8303425077026896E7 3.6405405405 8963 10735 -17 8963 true 10735.776923076923 8963.641 10735.776923076923 1969-12-31 18:29:23.64054054 +9216.339708939685 -5851.806444906470 0.8353975893550668 6195112.1797296945 3.6243243243 4193 5022 -98 4193 true 5022.715384615385 4193.6245 5022.715384615385 1969-12-31 17:09:53.624324324 +6514.8403326403464 -4136.5212058211928 0.8355907765708067 3095563.9418919063 4.3864864865 2964 3550 -34 2964 true 3550.4538461538464 2964.3865 3550.4538461538464 1969-12-31 16:49:24.386486486 +7587.301455301477 -4817.467775467754 0.8354976172734904 4198623.24324327 2.3783783784 3452 4134 38 3452 true 4134.923076923077 3452.3784 4134.923076923077 1969-12-31 16:57:32.378378378 +19197.9729729730 -12189.5270270270 0.835155361813429 2.6880848817567654E7 5.4729729730 8735 10462 -34 8735 true 10462.5 8735.473 10462.5 1969-12-31 18:25:35.472972973 +17098.9945945946 -10856.8054054054 0.8351828165813104 2.132423090270272E7 0.3945945946 7780 9318 102 7780 true 9318.6 7780.3945 9318.6 1969-12-31 18:09:40.394594594 +12433.723076923077 -7894.646153846154 0.8352770361086894 1.12754688E7 7.6 5657 6776 120 5657 true 6776.123076923077 5657.6 6776.123076923077 1969-12-31 17:34:17.6 +7247.316839916862 -4601.598544698524 0.8355241651897876 3830775.6932432684 7.6783783784 3297 3949 109 3297 true 3949.638461538462 3297.6785 3949.638461538462 1969-12-31 16:54:57.678378378 +14757.1700623700465 -9369.8914760914930 0.8352226654922171 1.5883214124324286E7 4.8162162162 6714 8042 106 6714 true 8042.3538461538465 6714.8164 8042.3538461538465 1969-12-31 17:51:54.816216216 +10964.832016631993 -6961.991060291086 0.8353232978714221 8768719.779729689 9.2243243243 4989 5975 87 4989 true 5975.607692307693 4989.224 5975.607692307693 
1969-12-31 17:23:09.224324324 diff --git ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out index 2cfa45a..24765af 100644 --- ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out +++ ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out @@ -195,14 +195,14 @@ and sin(cdecimal1) >= -1.0 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_test #### A masked pattern was here #### --119.4594594595 -119.46 -119 -120 -119 1.316485E-52 NULL NULL NULL NULL NULL NULL NULL NULL 119.459459459500000000 -0.07885666683797002 NaN 0.9968859644388647 NaN -1.5624254815943668 -6844.522849943508 -2.0849608902209606 -119.4594594595 119.4594594595 -1 NULL -9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.435135135100000000 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 -9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.435135135100000000 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 -9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.435135135100000000 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL --4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.151351351400000000 -0.43730633941118113 NaN 0.899312607223313 NaN 
-1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-119.4594594595 -119.46 -119 -120 -119 1.316485E-52 NULL NULL NULL NULL NULL NULL NULL NULL 119.4594594595 -0.07885666683797002 NaN 0.9968859644388647 NaN -1.5624254815943668 -6844.522849943508 -2.0849608902209606 -119.4594594595 119.4594594595 -1 NULL +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +9318.4351351351 9318.44 9318 9318 9319 Infinity 9.13974998962673 3.969342986470191 13.185871984999437 NULL 13.185871984999437 173.867220004793 173.867220004793 96.53204201266593 9318.4351351351 0.4540668481851705 NaN 0.8909676185918236 NaN 1.5706890126394983 533907.0049096602 162.63737424163023 9318.4351351351 -9318.4351351351 1 -0.9607267417229353 +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL +-4298.1513513514 -4298.15 -4298 -4299 -4298 0.0 NULL NULL NULL NULL NULL NULL NULL NULL 4298.1513513514 -0.43730633941118113 NaN 0.899312607223313 NaN -1.5705636686355597 -246265.93214088667 -75.01689283012556 -4298.1513513514 4298.1513513514 -1 NULL diff --git ql/src/test/results/clientpositive/vector_decimal_precision.q.out ql/src/test/results/clientpositive/vector_decimal_precision.q.out new file mode 100644 index 0000000..8789864 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_precision.q.out @@ -0,0 +1,669 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt +PREHOOK: type: DROPTABLE 
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10)) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_PRECISION_txt +POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10)) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_PRECISION_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_precision_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_precision_txt +PREHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10)) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_PRECISION +POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10)) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_PRECISION +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt +PREHOOK: Output: default@decimal_precision +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt +POSTHOOK: Output: default@decimal_precision +POSTHOOK: Lineage: decimal_precision.dec SIMPLE [(decimal_precision_txt)decimal_precision_txt.FieldSchema(name:dec, type:decimal(20,10), comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0 +0.1234567890 +0.1234567890 +1.2345678901 +1.2345678901 +1.2345678901 +12.3456789012 +12.3456789012 +12.3456789012 +123.4567890123 +123.4567890123 +123.4567890123 +1234.5678901235 +1234.5678901235 +1234.5678901235 +12345.6789012346 +12345.6789012346 +123456.7890123456 +123456.7890123457 +1234567.890123456 +1234567.8901234568 +12345678.90123456 +12345678.9012345679 +123456789.0123456 +123456789.0123456789 +1234567890.123456 +1234567890.1234567890 +PREHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@decimal_precision +#### A masked pattern was here #### +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0.0000000000 1.0000000000 -1.0000000000 +0 1 -1 +0.1234567890 1.1234567890 -0.8765432110 +0.1234567890 1.1234567890 -0.8765432110 +1.2345678901 2.2345678901 0.2345678901 +1.2345678901 2.2345678901 0.2345678901 +1.2345678901 2.2345678901 0.2345678901 +12.3456789012 13.3456789012 11.3456789012 +12.3456789012 13.3456789012 11.3456789012 +12.3456789012 13.3456789012 11.3456789012 +123.4567890123 124.4567890123 122.4567890123 +123.4567890123 124.4567890123 122.4567890123 +123.4567890123 124.4567890123 122.4567890123 +1234.5678901235 1235.5678901235 1233.5678901235 +1234.5678901235 1235.5678901235 1233.5678901235 +1234.5678901235 1235.5678901235 1233.5678901235 +12345.6789012346 12346.6789012346 12344.6789012346 +12345.6789012346 12346.6789012346 12344.6789012346 +123456.7890123456 123457.7890123456 123455.7890123456 +123456.7890123457 123457.7890123457 123455.7890123457 +1234567.890123456 1234568.890123456 1234566.890123456 +1234567.8901234568 1234568.8901234568 1234566.8901234568 +12345678.90123456 12345679.90123456 12345677.90123456 +12345678.9012345679 12345679.9012345679 12345677.9012345679 +123456789.0123456 123456790.0123456 123456788.0123456 +123456789.0123456789 123456790.0123456789 123456788.0123456789 +1234567890.123456 1234567891.123456 1234567889.123456 +1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 +PREHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0.0000000000 0.0000000000 0 +0 0 0 +0.1234567890 0.2469135780 0.041152263 +0.1234567890 0.2469135780 0.041152263 +1.2345678901 2.4691357802 
0.411522630033 +1.2345678901 2.4691357802 0.411522630033 +1.2345678901 2.4691357802 0.411522630033 +12.3456789012 24.6913578024 4.1152263004 +12.3456789012 24.6913578024 4.1152263004 +12.3456789012 24.6913578024 4.1152263004 +123.4567890123 246.9135780246 41.1522630041 +123.4567890123 246.9135780246 41.1522630041 +123.4567890123 246.9135780246 41.1522630041 +1234.5678901235 2469.1357802470 411.522630041167 +1234.5678901235 2469.1357802470 411.522630041167 +1234.5678901235 2469.1357802470 411.522630041167 +12345.6789012346 24691.3578024692 4115.226300411533 +12345.6789012346 24691.3578024692 4115.226300411533 +123456.7890123456 246913.5780246912 41152.2630041152 +123456.7890123457 246913.5780246914 41152.263004115233 +1234567.890123456 2469135.780246912 411522.630041152 +1234567.8901234568 2469135.7802469136 411522.630041152267 +12345678.90123456 24691357.80246912 4115226.30041152 +12345678.9012345679 24691357.8024691358 4115226.300411522633 +123456789.0123456 246913578.0246912 41152263.0041152 +123456789.0123456789 246913578.0246913578 41152263.0041152263 +1234567890.123456 2469135780.246912 411522630.041152 +1234567890.1234567890 2469135780.2469135780 411522630.041152263 +PREHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0 0 +0.1234567890 0.013717421 +0.1234567890 0.013717421 +1.2345678901 0.137174210011 +1.2345678901 0.137174210011 +1.2345678901 0.137174210011 +12.3456789012 1.371742100133 +12.3456789012 1.371742100133 +12.3456789012 1.371742100133 +123.4567890123 13.717421001367 +123.4567890123 13.717421001367 +123.4567890123 13.717421001367 +1234.5678901235 137.174210013722 +1234.5678901235 137.174210013722 +1234.5678901235 137.174210013722 +12345.6789012346 1371.742100137178 +12345.6789012346 1371.742100137178 +123456.7890123456 13717.421001371733 +123456.7890123457 13717.421001371744 +1234567.890123456 137174.210013717333 +1234567.8901234568 137174.210013717422 +12345678.90123456 1371742.100137173333 +12345678.9012345679 1371742.100137174211 +123456789.0123456 13717421.001371733333 +123456789.0123456789 13717421.0013717421 +1234567890.123456 137174210.013717333333 +1234567890.1234567890 137174210.013717421 +PREHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL 
+NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0.0000000000 0 +0 0 +0.1234567890 0.0045724736667 +0.1234567890 0.0045724736667 +1.2345678901 0.0457247366704 +1.2345678901 0.0457247366704 +1.2345678901 0.0457247366704 +12.3456789012 0.4572473667111 +12.3456789012 0.4572473667111 +12.3456789012 0.4572473667111 +123.4567890123 4.5724736671222 +123.4567890123 4.5724736671222 +123.4567890123 4.5724736671222 +1234.5678901235 45.7247366712407 +1234.5678901235 45.7247366712407 +1234.5678901235 45.7247366712407 +12345.6789012346 457.2473667123926 +12345.6789012346 457.2473667123926 +123456.7890123456 4572.4736671239111 +123456.7890123457 4572.4736671239148 +1234567.890123456 45724.7366712391111 +1234567.8901234568 45724.7366712391407 +12345678.90123456 457247.3667123911111 +12345678.9012345679 457247.3667123914037 +123456789.0123456 4572473.6671239111111 +123456789.0123456789 4572473.6671239140333 +1234567890.123456 45724736.6712391111111 +1234567890.1234567890 45724736.6712391403333 +PREHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0.0000000000 0.00000000000000000000 +0 0 +0.1234567890 0.01524157875019052100 +0.1234567890 0.01524157875019052100 +1.2345678901 1.52415787526596567801 +1.2345678901 1.52415787526596567801 +1.2345678901 1.52415787526596567801 +12.3456789012 152.41578753153483936144 +12.3456789012 152.41578753153483936144 +12.3456789012 152.41578753153483936144 +123.4567890123 15241.57875322755800955129 +123.4567890123 15241.57875322755800955129 +123.4567890123 15241.57875322755800955129 +1234.5678901235 1524157.87532399036884525225 +1234.5678901235 1524157.87532399036884525225 +1234.5678901235 1524157.87532399036884525225 +12345.6789012346 152415787.53238916034140423716 +12345.6789012346 152415787.53238916034140423716 +123456.7890123456 15241578753.23881726870921383936 +123456.7890123457 15241578753.23884196006701630849 +1234567.890123456 1524157875323.881726870921383936 +1234567.8901234568 1524157875323.88370217954558146624 +12345678.90123456 152415787532388.1726870921383936 +12345678.9012345679 152415787532388.36774881877789971041 +123456789.0123456 15241578753238817.26870921383936 +123456789.0123456789 15241578753238836.75019051998750190521 +1234567890.123456 NULL +1234567890.1234567890 NULL +PREHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN 
SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_precision + Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dec (type: decimal(20,10)) + outputColumnNames: dec + Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: avg(dec), sum(dec) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: avg(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(24,14)), _col1 (type: decimal(30,10)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +88499534.57586576220645 2743485571.8518386284 +PREHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +PREHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +PREHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +NULL +PREHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) 
FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +12345678901234567890.12345678 +PREHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +75 +PREHOOK: query: DROP TABLE DECIMAL_PRECISION_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_precision_txt +PREHOOK: Output: default@decimal_precision_txt +POSTHOOK: query: DROP TABLE DECIMAL_PRECISION_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_precision_txt +POSTHOOK: Output: default@decimal_precision_txt +PREHOOK: query: DROP TABLE DECIMAL_PRECISION +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_precision +PREHOOK: Output: default@decimal_precision +POSTHOOK: query: DROP TABLE DECIMAL_PRECISION +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_precision +POSTHOOK: Output: default@decimal_precision diff --git ql/src/test/results/clientpositive/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/vector_decimal_trailing.q.out new file mode 100644 index 0000000..cb0b5a2 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_trailing.q.out @@ -0,0 +1,121 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_TRAILING +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_TRAILING_txt ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_TRAILING_txt +POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING_txt ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_TRAILING_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_trailing_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_trailing_txt +PREHOOK: query: CREATE TABLE DECIMAL_TRAILING ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_TRAILING +POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING ( + id int, + a decimal(10,4), + b decimal(15,8) + ) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_TRAILING +PREHOOK: query: INSERT OVERWRITE 
TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_trailing_txt +PREHOOK: Output: default@decimal_trailing +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_trailing_txt +POSTHOOK: Output: default@decimal_trailing +POSTHOOK: Lineage: decimal_trailing.a SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:a, type:decimal(10,4), comment:null), ] +POSTHOOK: Lineage: decimal_trailing.b SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:b, type:decimal(15,8), comment:null), ] +POSTHOOK: Lineage: decimal_trailing.id SIMPLE [(decimal_trailing_txt)decimal_trailing_txt.FieldSchema(name:id, type:int, comment:null), ] +PREHOOK: query: SELECT * FROM DECIMAL_TRAILING ORDER BY id +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_trailing +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_TRAILING ORDER BY id +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_trailing +#### A masked pattern was here #### +0 0 0 +1 0 0 +2 NULL NULL +3 1.0000 1.00000000 +4 10.0000 10.00000000 +5 100.0000 100.00000000 +6 1000.0000 1000.00000000 +7 10000.0000 10000.00000000 +8 100000.0000 100000.00000000 +9 NULL 1000000.00000000 +10 NULL NULL +11 NULL NULL +12 NULL NULL +13 NULL NULL +14 NULL NULL +15 NULL NULL +16 NULL NULL +17 NULL NULL +18 1.0000 1.00000000 +19 10.000 10.0000000 +20 100.00 100.000000 +21 1000.0 1000.00000 +22 100000 10000.0000 +23 0.0000 0.00000000 +24 0.000 0.0000000 +25 0.00 0.000000 +26 0.0 0.00000 +27 0 0.00000 +28 12313.2000 134134.31252500 +29 99999.9990 134134.31242553 +PREHOOK: query: DROP TABLE DECIMAL_TRAILING_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_trailing_txt +PREHOOK: Output: default@decimal_trailing_txt +POSTHOOK: query: DROP TABLE DECIMAL_TRAILING_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_trailing_txt +POSTHOOK: Output: default@decimal_trailing_txt +PREHOOK: query: DROP TABLE DECIMAL_TRAILING +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_trailing +PREHOOK: Output: default@decimal_trailing +POSTHOOK: query: DROP TABLE DECIMAL_TRAILING +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_trailing +POSTHOOK: Output: default@decimal_trailing diff --git ql/src/test/results/clientpositive/vector_decimal_udf.q.out ql/src/test/results/clientpositive/vector_decimal_udf.q.out new file mode 100644 index 0000000..24a87c5 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_udf.q.out @@ -0,0 +1,2657 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF_txt +POSTHOOK: query: CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE 
DECIMAL_UDF_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_udf_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_udf_txt +PREHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF +POSTHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF SELECT * FROM DECIMAL_UDF_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf_txt +PREHOOK: Output: default@decimal_udf +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF SELECT * FROM DECIMAL_UDF_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf_txt +POSTHOOK: Output: default@decimal_udf +POSTHOOK: Lineage: decimal_udf.key SIMPLE [(decimal_udf_txt)decimal_udf_txt.FieldSchema(name:key, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: decimal_udf.value SIMPLE [(decimal_udf_txt)decimal_udf_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: -- addition +EXPLAIN SELECT key + key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- addition +EXPLAIN SELECT key + key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + key) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-8800 +NULL +0.0000000000 +0 +200 +20 +2 +0.2 +0.02 +400 +40 +4 +0 +0.4 +0.04 +0.6 +0.66 +0.666 +-0.6 +-0.66 +-0.666 +2.0 +4 +6.28 +-2.24 +-2.24 +-2.244 +2.24 +2.244 +248.00 +250.4 +-2510.98 +6.28 +6.28 +6.280 +2.0000000000 +-2469135780.2469135780 +2469135780.2469135600 +PREHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + value) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 
Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +0 +NULL +0.0000000000 +0 +200 +20 +2 +0.1 +0.01 +400 +40 +4 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +2.0 +4 +6.14 +-2.12 +-2.12 +-12.122 +2.12 +2.122 +248.00 +250.2 +-2510.49 +6.14 +6.14 +7.140 +2.0000000000 +-2469135780.1234567890 +2469135780.1234567800 +PREHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2200.0 +NULL +0.0 +0.0 +150.0 +15.0 +1.5 +0.1 +0.01 +300.0 +30.0 +3.0 +0.0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +1.5 +3.0 +4.640000000000001 +-1.62 +-1.62 +-6.622 +1.62 +1.622 +186.0 +187.7 +-1882.99 +4.640000000000001 +4.640000000000001 +5.140000000000001 +1.5 +-1.8518518351234567E9 +1.8518518351234567E9 +PREHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key + '1.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4399.0 +NULL +1.0 +1.0 +101.0 +11.0 +2.0 +1.1 +1.01 +201.0 +21.0 +3.0 +1.0 +1.2 +1.02 +1.3 +1.33 +1.333 +0.7 +0.6699999999999999 +0.667 +2.0 +3.0 +4.140000000000001 +-0.1200000000000001 +-0.1200000000000001 +-0.12200000000000011 +2.12 +2.122 +125.0 +126.2 +-1254.49 +4.140000000000001 +4.140000000000001 +4.140000000000001 +2.0 +-1.2345678891234567E9 +1.2345678911234567E9 +PREHOOK: query: -- substraction +EXPLAIN SELECT key - key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- substraction +EXPLAIN SELECT key - key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - key) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +0 +NULL +0.0000000000 +0 +0 +0 +0 +0.0 +0.00 +0 +0 +0 +0 +0.0 +0.00 +0.0 +0.00 +0.000 +0.0 +0.00 +0.000 +0.0 +0 +0.00 +0.00 +0.00 +0.000 +0.00 +0.000 +0.00 +0.0 +0.00 +0.00 +0.00 +0.000 +0.0000000000 +0.0000000000 +0.0000000000 +PREHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - value) (type: decimal(21,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: 
query: SELECT key - value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-8800 +NULL +0.0000000000 +0 +0 +0 +0 +0.1 +0.01 +0 +0 +0 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +0.0 +0 +0.14 +-0.12 +-0.12 +9.878 +0.12 +0.122 +0.00 +0.2 +-0.49 +0.14 +0.14 +-0.860 +0.0000000000 +-0.1234567890 +0.1234567800 +PREHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-6600.0 +NULL +0.0 +0.0 +50.0 +5.0 +0.5 +0.1 +0.01 +100.0 +10.0 +1.0 +0.0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +0.5 +1.0 +1.6400000000000001 +-0.6200000000000001 +-0.6200000000000001 +4.378 +0.6200000000000001 +0.6220000000000001 +62.0 +62.7 +-627.99 +1.6400000000000001 +1.6400000000000001 +1.1400000000000001 +0.5 +-6.172839451234567E8 +6.172839451234567E8 +PREHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key - '1.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf 
+#### A masked pattern was here #### +-4401.0 +NULL +-1.0 +-1.0 +99.0 +9.0 +0.0 +-0.9 +-0.99 +199.0 +19.0 +1.0 +-1.0 +-0.8 +-0.98 +-0.7 +-0.6699999999999999 +-0.667 +-1.3 +-1.33 +-1.333 +0.0 +1.0 +2.14 +-2.12 +-2.12 +-2.122 +0.1200000000000001 +0.12200000000000011 +123.0 +124.2 +-1256.49 +2.14 +2.14 +2.14 +0.0 +-1.2345678911234567E9 +1.2345678891234567E9 +PREHOOK: query: -- multiplication +EXPLAIN SELECT key * key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- multiplication +EXPLAIN SELECT key * key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * key) (type: decimal(38,20)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +19360000 +NULL +0.00000000000000000000 +0 +10000 +100 +1 +0.01 +0.0001 +40000 +400 +4 +0 +0.04 +0.0004 +0.09 +0.1089 +0.110889 +0.09 +0.1089 +0.110889 +1.00 +4 +9.8596 +1.2544 +1.2544 +1.258884 +1.2544 +1.258884 +15376.0000 +15675.04 +1576255.1401 +9.8596 +9.8596 +9.859600 +1.00000000000000000000 +NULL +NULL +PREHOOK: query: EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key * value) > 0) (type: boolean) + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 1356 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key, value FROM DECIMAL_UDF where key * value > 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key, value FROM DECIMAL_UDF where key * value 
> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +100 100 +10 10 +1 1 +200 200 +20 20 +2 2 +1.0 1 +2 2 +3.14 3 +-1.12 -1 +-1.12 -1 +-1.122 -11 +1.12 1 +1.122 1 +124.00 124 +125.2 125 +-1255.49 -1255 +3.14 3 +3.14 3 +3.140 4 +1.0000000000 1 +-1234567890.1234567890 -1234567890 +1234567890.1234567800 1234567890 +PREHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * value) (type: decimal(31,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * value FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * value FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-19360000 +NULL +0.0000000000 +0 +10000 +100 +1 +0.0 +0.00 +40000 +400 +4 +0 +0.0 +0.00 +0.0 +0.00 +0.000 +0.0 +0.00 +0.000 +1.0 +4 +9.42 +1.12 +1.12 +12.342 +1.12 +1.122 +15376.00 +15650.0 +1575639.95 +9.42 +9.42 +12.560 +1.0000000000 +1524157875171467887.5019052100 +1524157875171467876.3907942000 +PREHOOK: query: EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-9680000.0 +NULL +0.0 +0.0 +5000.0 +50.0 +0.5 +0.0 +0.0 +20000.0 +200.0 +2.0 +0.0 +0.0 +0.0 +0.0 +0.0 +0.0 +-0.0 +-0.0 +-0.0 +0.5 +2.0 +4.71 +0.56 +0.56 +6.171 +0.56 
+0.561 +7688.0 +7825.0 +787819.975 +4.71 +4.71 +6.28 +0.5 +7.6207893758573389E17 +7.6207893758573389E17 +PREHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key * '2.0') (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-8800.0 +NULL +0.0 +0.0 +200.0 +20.0 +2.0 +0.2 +0.02 +400.0 +40.0 +4.0 +0.0 +0.4 +0.04 +0.6 +0.66 +0.666 +-0.6 +-0.66 +-0.666 +2.0 +4.0 +6.28 +-2.24 +-2.24 +-2.244 +2.24 +2.244 +248.0 +250.4 +-2510.98 +6.28 +6.28 +6.28 +2.0 +-2.4691357802469134E9 +2.4691357802469134E9 +PREHOOK: query: -- division +EXPLAIN SELECT key / 0 FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +POSTHOOK: query: -- division +EXPLAIN SELECT key / 0 FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / 0) (type: decimal(22,12)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +NULL +PREHOOK: query: EXPLAIN SELECT key / NULL FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / NULL FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: 
decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / null) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 113 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +NULL +PREHOOK: query: EXPLAIN SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / key) (type: decimal(38,24)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +PREHOOK: query: EXPLAIN SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (value <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / value) (type: 
decimal(31,21)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1 +1 +1 +1 +1 +1 +1 +1 +1 +1.046666666666666666667 +1.12 +1.12 +0.102 +1.12 +1.122 +1 +1.0016 +1.000390438247011952191 +1.046666666666666666667 +1.046666666666666666667 +0.785 +1 +1.0000000001 +1.000000000099999992710 +PREHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (value <> 0) (type: boolean) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (key / (value / 2)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0 +2.0933333333333333 +2.24 +2.24 +0.20400000000000001 +2.24 +2.244 +2.0 +2.0032 +2.000780876494024 +2.0933333333333333 +2.0933333333333333 +1.57 +2.0 +2.0000000002 +2.0000000002 +PREHOOK: query: EXPLAIN SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (1 + (key / 
'2.0')) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT 1 + (key / '2.0') FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-2199.0 +NULL +1.0 +1.0 +51.0 +6.0 +1.5 +1.05 +1.005 +101.0 +11.0 +2.0 +1.0 +1.1 +1.01 +1.15 +1.165 +1.1665 +0.85 +0.835 +0.8335 +1.5 +2.0 +2.5700000000000003 +0.43999999999999995 +0.43999999999999995 +0.43899999999999995 +1.56 +1.561 +63.0 +63.6 +-626.745 +2.5700000000000003 +2.5700000000000003 +2.5700000000000003 +1.5 +-6.172839440617284E8 +6.172839460617284E8 +PREHOOK: query: -- abs +EXPLAIN SELECT abs(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- abs +EXPLAIN SELECT abs(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: abs(key) (type: decimal(38,18)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT abs(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT abs(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +4400 +NULL +0.0000000000 +0 +100 +10 +1 +0.1 +0.01 +200 +20 +2 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +0.3 +0.33 +0.333 +1.0 +2 +3.14 +1.12 +1.12 +1.122 +1.12 +1.122 +124.00 +125.2 +1255.49 +3.14 +3.14 +3.140 +1.0000000000 +1234567890.1234567890 +1234567890.1234567800 +PREHOOK: query: -- avg +EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +PREHOOK: type: QUERY +POSTHOOK: query: -- avg +EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: int), key (type: decimal(20,10)) + 
outputColumnNames: value, key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(key), count(key), avg(key) + keys: value (type: int) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(30,10)), _col2 (type: bigint), _col3 (type: struct<count:bigint,sum:decimal(30,10),input:decimal(20,10)>) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1), avg(VALUE._col2) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), (_col1 / _col2) (type: decimal(38,23)), _col3 (type: decimal(24,14)), _col1 (type: decimal(30,10)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,23)), _col2 (type: decimal(24,14)), _col3 (type: decimal(30,10)) + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,23)), VALUE._col1 (type: decimal(24,14)), VALUE._col2 (type: decimal(30,10)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1234567890 -1234567890.123456789 -1234567890.123456789 -1234567890.1234567890 +-1255 -1255.49 -1255.49 -1255.49 +-11 -1.122 -1.122 -1.122 +-1 -1.12 -1.12 -2.24 +0 0.02538461538461538461538 0.02538461538462 0.3300000000 +1 1.0484 1.0484 5.2420000000 +2 2 2 4 +3 3.14 3.14 9.42 +4 3.14 3.14 3.140 +10 10 10 10 +20 20 20 20 +100 100 100 100 +124 124 124 124.00 +125 125.2 125.2 125.2 +200 200 200 200 +4400 -4400 -4400 -4400 +1234567890 1234567890.12345678 1234567890.12345678 1234567890.1234567800 +PREHOOK: query: --
negative +EXPLAIN SELECT -key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- negative +EXPLAIN SELECT -key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (- key) (type: decimal(20,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT -key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT -key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +4400 +NULL +0.0000000000 +0 +-100 +-10 +-1 +-0.1 +-0.01 +-200 +-20 +-2 +0 +-0.2 +-0.02 +-0.3 +-0.33 +-0.333 +0.3 +0.33 +0.333 +-1.0 +-2 +-3.14 +1.12 +1.12 +1.122 +-1.12 +-1.122 +-124.00 +-125.2 +1255.49 +-3.14 +-3.14 +-3.140 +-1.0000000000 +1234567890.1234567890 +-1234567890.1234567800 +PREHOOK: query: -- positive +EXPLAIN SELECT +key FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- positive +EXPLAIN SELECT +key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + ListSink + +PREHOOK: query: SELECT +key FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT +key FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400 +NULL +0.0000000000 +0 +100 +10 +1 +0.1 +0.01 +200 +20 +2 +0 +0.2 +0.02 +0.3 +0.33 +0.333 +-0.3 +-0.33 +-0.333 +1.0 +2 +3.14 +-1.12 +-1.12 +-1.122 +1.12 +1.122 +124.00 +125.2 +-1255.49 +3.14 +3.14 +3.140 +1.0000000000 +-1234567890.1234567890 +1234567890.1234567800 +PREHOOK: query: -- ceiling +EXPlAIN SELECT CEIL(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- ceiling +EXPlAIN SELECT CEIL(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ceil(key) (type: decimal(11,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + 
input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400 +NULL +0 +0 +100 +10 +1 +1 +1 +200 +20 +2 +0 +1 +1 +1 +1 +1 +0 +0 +0 +1 +2 +4 +-1 +-1 +-1 +2 +2 +124 +126 +-1255 +4 +4 +4 +1 +-1234567890 +1234567891 +PREHOOK: query: -- floor +EXPLAIN SELECT FLOOR(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- floor +EXPLAIN SELECT FLOOR(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: floor(key) (type: decimal(11,0)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400 +NULL +0 +0 +100 +10 +1 +0 +0 +200 +20 +2 +0 +0 +0 +0 +0 +0 +-1 +-1 +-1 +1 +2 +3 +-2 +-2 +-2 +1 +1 +124 +125 +-1256 +3 +3 +3 +1 +-1234567891 +1234567890 +PREHOOK: query: -- round +EXPLAIN SELECT ROUND(key, 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- round +EXPLAIN SELECT ROUND(key, 2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: round(key, 2) (type: decimal(13,2)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT ROUND(key, 2) FROM 
DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-4400.00 +NULL +0.00 +0.00 +100.00 +10.00 +1.00 +0.10 +0.01 +200.00 +20.00 +2.00 +0.00 +0.20 +0.02 +0.30 +0.33 +0.33 +-0.30 +-0.33 +-0.33 +1.00 +2.00 +3.14 +-1.12 +-1.12 +-1.12 +1.12 +1.12 +124.00 +125.20 +-1255.49 +3.14 +3.14 +3.14 +1.00 +-1234567890.12 +1234567890.12 +PREHOOK: query: -- power +EXPLAIN SELECT POWER(key, 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- power +EXPLAIN SELECT POWER(key, 2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: power(key, 2) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +1.936E7 +NULL +0.0 +0.0 +10000.0 +100.0 +1.0 +0.010000000000000002 +1.0E-4 +40000.0 +400.0 +4.0 +0.0 +0.04000000000000001 +4.0E-4 +0.09 +0.10890000000000001 +0.11088900000000002 +0.09 +0.10890000000000001 +0.11088900000000002 +1.0 +4.0 +9.8596 +1.2544000000000002 +1.2544000000000002 +1.2588840000000003 +1.2544000000000002 +1.2588840000000003 +15376.0 +15675.04 +1576255.1401 +9.8596 +9.8596 +9.8596 +1.0 +1.52415787532388352E18 +1.52415787532388352E18 +PREHOOK: query: -- modulo +EXPLAIN SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- modulo +EXPLAIN SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ((key + 1) % (key / 2)) (type: decimal(22,12)) + outputColumnNames: _col0 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@decimal_udf +#### A masked pattern was here #### +-2199 +NULL +NULL +NULL +1 +1 +0.0 +0.00 +0.000 +1 +1 +0 +NULL +0.0 +0.00 +0.10 +0.010 +0.0010 +0.10 +0.010 +0.0010 +0.0 +0 +1.00 +-0.12 +-0.12 +-0.122 +0.44 +0.439 +1.00 +1.0 +-626.745 +1.00 +1.00 +1.000 +0.0000000000 +-617283944.0617283945 +1.0000000000 +PREHOOK: query: -- stddev, var +EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value +PREHOOK: type: QUERY +POSTHOOK: query: -- stddev, var +EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: int), key (type: decimal(20,10)) + outputColumnNames: value, key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: stddev(key), variance(key) + keys: value (type: int) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: struct<count:bigint,sum:double,variance:double>) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: stddev(VALUE._col0), variance(VALUE._col1) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: double), _col2 (type: double) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1234567890 0.0 0.0 +-1255 0.0 0.0 +-11 0.0 0.0 +-1 0.0 0.0 +0 0.22561046704494161 0.050900082840236685 +1 0.05928102563215321 0.0035142400000000066 +2 0.0 0.0 +3 0.0 0.0 +4 0.0 0.0 +10 0.0 0.0 +20 0.0 0.0 +100 0.0 0.0 +124 0.0 0.0 +125 0.0 0.0 +200 0.0 0.0 +4400 0.0 0.0 +1234567890 0.0 0.0 +PREHOOK: query: -- stddev_samp, var_samp +EXPLAIN SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value +PREHOOK: type: QUERY +POSTHOOK: query: -- stddev_samp, var_samp +EXPLAIN SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE
PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: int), key (type: decimal(20,10)) + outputColumnNames: value, key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: stddev_samp(key), var_samp(key) + keys: value (type: int) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: struct<count:bigint,sum:double,variance:double>) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: stddev_samp(VALUE._col0), var_samp(VALUE._col1) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: double), _col2 (type: double) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT value, stddev_samp(key), var_samp(key) FROM DECIMAL_UDF GROUP BY value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1234567890 0.0 0.0 +-1255 0.0 0.0 +-11 0.0 0.0 +-1 0.0 0.0 +0 0.2348228191855647 0.055141756410256405 +1 0.06627820154470102 0.004392800000000008 +2 0.0 0.0 +3 0.0 0.0 +4 0.0 0.0 +10 0.0 0.0 +20 0.0 0.0 +100 0.0 0.0 +124 0.0 0.0 +125 0.0 0.0 +200 0.0 0.0 +4400 0.0 0.0 +1234567890 0.0 0.0 +PREHOOK: query: -- histogram +EXPLAIN SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- histogram +EXPLAIN SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: histogram_numeric(key, 3) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + value expressions: _col0 (type: array<struct<x:double,y:double>>) + Reduce Operator Tree: + Group By Operator +
aggregations: histogram_numeric(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: _col0 (type: array<struct<x:double,y:double>>) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT histogram_numeric(key, 3) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +[{"x":-1.2345678901234567E9,"y":1.0},{"x":-144.50057142857142,"y":35.0},{"x":1.2345678901234567E9,"y":1.0}] +PREHOOK: query: -- min +EXPLAIN SELECT MIN(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- min +EXPLAIN SELECT MIN(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: min(key) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(20,10)) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: min(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(20,10)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT MIN(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT MIN(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +-1234567890.1234567890 +PREHOOK: query: -- max +EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- max +EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 +
Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: max(key) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(20,10)) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: max(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: decimal(20,10)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT MAX(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT MAX(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +1234567890.1234567800 +PREHOOK: query: -- count +EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +POSTHOOK: query: -- count +EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: decimal(20,10)) + outputColumnNames: key + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(key) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: 
query: SELECT COUNT(key) FROM DECIMAL_UDF +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +POSTHOOK: query: SELECT COUNT(key) FROM DECIMAL_UDF +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf +#### A masked pattern was here #### +37 +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf_txt +PREHOOK: Output: default@decimal_udf_txt +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf_txt +POSTHOOK: Output: default@decimal_udf_txt +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf +PREHOOK: Output: default@decimal_udf +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf +POSTHOOK: Output: default@decimal_udf diff --git ql/src/test/results/clientpositive/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/vector_decimal_udf2.q.out new file mode 100644 index 0000000..0495761 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_udf2.q.out @@ -0,0 +1,181 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF2_txt +POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(20,10), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF2_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_udf2_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_udf2_txt +PREHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@DECIMAL_UDF2 +POSTHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@DECIMAL_UDF2 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2 SELECT * FROM DECIMAL_UDF2_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +PREHOOK: Output: default@decimal_udf2 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2 SELECT * FROM DECIMAL_UDF2_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +POSTHOOK: Output: default@decimal_udf2 +POSTHOOK: Lineage: decimal_udf2.key SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:key, type:decimal(20,10), comment:null), ] +POSTHOOK: Lineage: decimal_udf2.value SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: EXPLAIN +SELECT acos(key), asin(key), atan(key), cos(key), sin(key), 
tan(key), radians(key) +FROM DECIMAL_UDF2 WHERE key = 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2 WHERE key = 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: acos(key) (type: double), asin(key) (type: double), atan(key) (type: double), cos(key) (type: double), sin(key) (type: double), tan(key) (type: double), radians(key) (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2 +#### A masked pattern was here #### +NaN NaN 1.4711276743037347 -0.8390715290764524 -0.5440211108893698 0.6483608274590866 0.17453292519943295 +PREHOOK: query: EXPLAIN +SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2 WHERE key = 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2 WHERE key = 10 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2 + Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: exp(key) (type: double), ln(key) (type: double), log(key) (type: double), log(key, key) (type: double), log(key, value) (type: double), log(value, key) (type: double), log10(key) (type: double), sqrt(key) (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2 +#### A masked pattern was here #### +22026.465794806718 2.302585092994046 2.302585092994046 1.0 1.0 1.0 1.0 3.1622776601683795 +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf2_txt +PREHOOK: Output: default@decimal_udf2_txt +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf2_txt +POSTHOOK: Output: default@decimal_udf2_txt +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf2 +PREHOOK: Output: default@decimal_udf2 +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf2 +POSTHOOK: Output: default@decimal_udf2 diff --git serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java index ad84c70..6ab64e5 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java +++ serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java @@ -62,6 +62,10 @@ public void set(HiveDecimal value) { set(value.unscaledValue().toByteArray(), value.scale()); } + public void set(HiveDecimal value, int maxPrecision, int maxScale) { + set(HiveDecimal.enforcePrecisionScale(value, maxPrecision, maxScale)); + } + public void set(HiveDecimalWritable writable) { set(writable.getHiveDecimal()); }
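
Editor's note (not part of the patch): the final hunk above adds a set(HiveDecimal, int, int) overload to HiveDecimalWritable that clamps a value to a column's declared precision and scale, via HiveDecimal.enforcePrecisionScale, before storing it. A minimal sketch of the intended behavior follows; the class name and sample values are illustrative only, and the printed results are expectations under the semantics this patch adds, not asserted output.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

// Hypothetical driver, not part of this patch.
public class EnforcePrecisionScaleSketch {
  public static void main(String[] args) {
    HiveDecimalWritable writable = new HiveDecimalWritable();

    // Fits decimal(5,2) after rounding the fractional digits: 3.14159...
    // becomes 3.14, mirroring the ROUND(key, 2) -> decimal(13,2) results
    // in the golden output above.
    writable.set(HiveDecimal.create("3.14159265358979"), 5, 2);
    System.out.println(writable.getHiveDecimal()); // expected: 3.14

    // Does not fit: ten integer digits cannot be represented in
    // decimal(5,2), so enforcePrecisionScale yields null rather than a
    // silently truncated value. Calling it directly here avoids passing
    // null into set(), which the sketch does not defend against.
    HiveDecimal clamped = HiveDecimal.enforcePrecisionScale(
        HiveDecimal.create("1234567890.1234567890"), 5, 2);
    System.out.println(clamped); // expected: null
  }
}

The null-on-overflow result (rather than a truncated number) presumably lets vectorized writers surface unrepresentable values as SQL NULL; callers of the new setter are assumed to check representability first, since set(null) is not guarded.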