diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt index 699b7c5..1609428 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt @@ -42,9 +42,11 @@ public class extends VectorExpression { this.colNum1 = colNum1; this.colNum2 = colNum2; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -144,11 +146,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum1() { return colNum1; } diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt index 99366ca..15feb07 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt @@ -42,9 +42,11 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -128,11 +130,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum() { return colNum; } diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt index 2aa4152..418caac 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt @@ -42,9 +42,11 @@ public class extends VectorExpression { 
this.colNum1 = colNum1; this.colNum2 = colNum2; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -136,11 +138,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum1() { return colNum1; } diff --git ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt index 2e84334..dbdb8f6 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt @@ -43,9 +43,11 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -140,11 +142,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum() { return colNum; } diff --git ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt index 9578d34..967e0d4 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt @@ -42,9 +42,11 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -125,11 +127,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum() { return colNum; } diff --git 
ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt index 6ee9d5f..84ed925 100644 --- ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt +++ ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt @@ -42,9 +42,11 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public () { + this.outputType = "decimal"; } @Override @@ -127,11 +129,6 @@ public class extends VectorExpression { return outputColumn; } - @Override - public String getOutputType() { - return "decimal"; - } - public int getColNum() { return colNum; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java index 1c70387..d9855c1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java @@ -48,7 +48,11 @@ public int getValue() { } public static ArgumentType getType(String inType) { - return valueOf(VectorizationContext.getNormalizedTypeName(inType).toUpperCase()); + String type = VectorizationContext.getNormalizedTypeName(inType); + if (VectorizationContext.decimalTypePattern.matcher(type.toLowerCase()).matches()) { + type = "decimal"; + } + return valueOf(type.toUpperCase()); } } @@ -186,6 +190,32 @@ private Descriptor(Mode mode, int argCount, ArgumentType[] argTypes, InputExpres this.exprTypes = exprTypes.clone(); this.argCount = argCount; } + + @Override + public String toString() { + StringBuilder b = new StringBuilder("Argument Count = "); + b.append(argCount); + b.append(", mode = "); + b.append(mode); + b.append(", Argument Types = {"); + for (int i = 0; i < argCount; i++) { + if (i != 0) { + b.append(","); + } + 
b.append(argTypes[i]); + } + b.append("}"); + + b.append(", Input Expression Types = {"); + for (int i = 0; i < argCount; i++) { + if (i != 0) { + b.append(","); + } + b.append(exprTypes[i]); + } + b.append("}"); + return b.toString(); + } } public Class getVectorExpressionClass(Class udf, Descriptor descriptor) throws HiveException { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index f5ab731..02b8feb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; @@ -27,9 +28,12 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.exec.FunctionInfo; @@ -89,7 +93,11 @@ import org.apache.hadoop.hive.ql.udf.UDFToString; import org.apache.hadoop.hive.ql.udf.generic.*; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter; +import org.apache.hadoop.hive.serde2.typeinfo.*; /** * Context 
class for vectorization execution. @@ -109,11 +117,32 @@ private final int firstOutputColumnIndex; private final Mode operatorMode = Mode.PROJECTION; + public static final Pattern decimalTypePattern = Pattern.compile("decimal.*"); + //Map column number to type private final OutputColumnManager ocm; private String fileKey = null; + private static Set> castExpressionUdfs = new HashSet>(); + static { + castExpressionUdfs.add(GenericUDFToDecimal.class); + castExpressionUdfs.add(GenericUDFToBinary.class); + castExpressionUdfs.add(GenericUDFToDate.class); + castExpressionUdfs.add(GenericUDFToUnixTimeStamp.class); + castExpressionUdfs.add(GenericUDFToUtcTimestamp.class); + castExpressionUdfs.add(GenericUDFToChar.class); + castExpressionUdfs.add(GenericUDFToVarchar.class); + castExpressionUdfs.add(UDFToByte.class); + castExpressionUdfs.add(UDFToBoolean.class); + castExpressionUdfs.add(UDFToDouble.class); + castExpressionUdfs.add(UDFToFloat.class); + castExpressionUdfs.add(UDFToString.class); + castExpressionUdfs.add(UDFToInteger.class); + castExpressionUdfs.add(UDFToLong.class); + castExpressionUdfs.add(UDFToShort.class); + } + public VectorizationContext(Map columnMap, int initialOutputCol) { this.columnMap = columnMap; @@ -168,6 +197,8 @@ int allocateOutputColumn(String columnType) { private int allocateOutputColumnInternal(String columnType) { for (int i = 0; i < outputColCount; i++) { + + // Re-use an existing, available column of the same required type. 
if (usedOutputColumns.contains(i) || !(outputColumnsTypes)[i].equalsIgnoreCase(columnType)) { continue; @@ -259,11 +290,15 @@ public VectorExpression getVectorExpression(ExprNodeDesc exprDesc, Mode mode) th if (isCustomUDF(expr) || isNonVectorizedPathUDF(expr)) { ve = getCustomUDFExpression(expr); } else { + // Add cast expression if needed + List childExpressions = getChildExpressionsWithImplicitCast(expr.getGenericUDF(), + exprDesc.getChildren(), exprDesc.getTypeInfo()); ve = getGenericUdfVectorExpression(expr.getGenericUDF(), - expr.getChildren(), mode); + childExpressions, mode, exprDesc.getTypeInfo()); } } else if (exprDesc instanceof ExprNodeConstantDesc) { - ve = getConstantVectorExpression((ExprNodeConstantDesc) exprDesc, mode); + ve = getConstantVectorExpression(((ExprNodeConstantDesc) exprDesc).getValue(), exprDesc.getTypeInfo(), + mode); } if (ve == null) { throw new HiveException("Could not vectorize expression: "+exprDesc.getName()); @@ -271,6 +306,132 @@ public VectorExpression getVectorExpression(ExprNodeDesc exprDesc, Mode mode) th return ve; } + private List getChildExpressionsWithImplicitCast(GenericUDF genericUDF, + List children, TypeInfo returnType) { + if (isCastExpression(genericUDF)) { + // No implicit cast needed + return children; + } + if (children == null) { + return null; + } + TypeInfo commonType; + if (genericUDF instanceof GenericUDFBaseCompare) { + // Comparison rules + TypeInfo tLeft = children.get(0).getTypeInfo(); + TypeInfo tRight = children.get(1).getTypeInfo(); + commonType = FunctionRegistry.getCommonClassForComparison(tLeft, tRight); + if (commonType == null) { + commonType = returnType; + } + } else { + // The children type should be converted to return type + commonType = returnType; + } + List childrenWithCasts = new ArrayList(); + boolean atleastOneCastNeeded = false; + for (ExprNodeDesc child : children) { + ExprNodeDesc castExpression = getImplicitCastExpression(child, commonType); + if (castExpression != null) { + 
atleastOneCastNeeded = true; + childrenWithCasts.add(castExpression); + } else { + childrenWithCasts.add(child); + } + } + if (atleastOneCastNeeded) { + return childrenWithCasts; + } else { + return children; + } + } + + private boolean isCastExpression(GenericUDF genericUDF) { + boolean ret = castExpressionUdfs.contains(genericUDF.getClass()); + if (ret) { + return ret; + } else if (genericUDF instanceof GenericUDFBridge) { + return castExpressionUdfs.contains(((GenericUDFBridge) genericUDF).getUdfClass()); + } + return false; + } + + private ExprNodeDesc getImplicitCastExpression(ExprNodeDesc child, TypeInfo castType) { + TypeInfo inputTypeInfo = child.getTypeInfo(); + String inputTypeString = inputTypeInfo.getTypeName(); + String castTypeString = castType.getTypeName(); + boolean inputTypeDecimal = false; + boolean castTypeDecimal = false; + if (decimalTypePattern.matcher(inputTypeString).matches()) { + inputTypeDecimal = true; + } + if (decimalTypePattern.matcher(castTypeString).matches()) { + castTypeDecimal = true; + } + + if (castTypeDecimal && !inputTypeDecimal) { + // Cast the input to decimal + GenericUDFToDecimal castToDecimalUDF = new GenericUDFToDecimal(); + List children = new ArrayList(); + children.add(child); + ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, castToDecimalUDF, children); + return desc; + } else if (!castTypeDecimal && inputTypeDecimal) { + // Cast decimal input to returnType + UDF udfClass = null; + GenericUDF genericUdf = null; + PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = + ((PrimitiveTypeInfo) castType).getPrimitiveCategory(); + switch (((PrimitiveTypeInfo) castType).getPrimitiveCategory()) { + case BYTE: + udfClass = new UDFToByte(); + break; + case SHORT: + udfClass = new UDFToShort(); + break; + case INT: + udfClass = new UDFToInteger(); + break; + case LONG: + udfClass = new UDFToLong(); + break; + case FLOAT: + udfClass = new UDFToFloat(); + break; + case DOUBLE: + udfClass = new UDFToDouble(); + 
break; + case STRING: + udfClass = new UDFToString(); + break; + case BOOLEAN: + udfClass = new UDFToBoolean(); + break; + case DATE: + genericUdf = new GenericUDFToDate(); + break; + case TIMESTAMP: + genericUdf = new GenericUDFToUnixTimeStamp(); + break; + case BINARY: + genericUdf = new GenericUDFToBinary(); + break; + } + if (genericUdf == null) { + genericUdf = new GenericUDFBridge(); + ((GenericUDFBridge) genericUdf).setUdfClassName(udfClass.getClass().getName()); + } + List children = new ArrayList(); + children.add(child); + ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children); + return desc; + } + // No cast needed + return null; + } + + + /* Return true if this is one of a small set of functions for which * it is significantly easier to use the old code path in vectorized * mode instead of implementing a new, optimized VectorExpression. @@ -391,39 +552,42 @@ private ExprNodeDesc foldConstantsForUnaryExpression(ExprNodeDesc exprDesc) thro return constantFoldedChildren; } - private VectorExpression getConstantVectorExpression(ExprNodeConstantDesc exprDesc, Mode mode) - throws HiveException { - String type = exprDesc.getTypeString(); + private VectorExpression getConstantVectorExpression(Object constantValue, TypeInfo typeInfo, + Mode mode) throws HiveException { + String type = typeInfo.getTypeName(); String colVectorType = getNormalizedTypeName(type); int outCol = -1; if (mode == Mode.PROJECTION) { outCol = ocm.allocateOutputColumn(colVectorType); } - if (type.equalsIgnoreCase("long") || type.equalsIgnoreCase("int") || + if (decimalTypePattern.matcher(type).matches()) { + VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue); + ve.setOutputType(typeInfo.getTypeName()); + return ve; + } else if (type.equalsIgnoreCase("long") || type.equalsIgnoreCase("int") || type.equalsIgnoreCase("short") || type.equalsIgnoreCase("byte")) { return new ConstantVectorExpression(outCol, - ((Number) 
exprDesc.getValue()).longValue()); + ((Number) constantValue).longValue()); } else if (type.equalsIgnoreCase("double") || type.equalsIgnoreCase("float")) { - return new ConstantVectorExpression(outCol, ((Number) exprDesc.getValue()).doubleValue()); + return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue()); } else if (type.equalsIgnoreCase("string")) { - return new ConstantVectorExpression(outCol, ((String) exprDesc.getValue()).getBytes()); + return new ConstantVectorExpression(outCol, ((String) constantValue).getBytes()); } else if (type.equalsIgnoreCase("boolean")) { if (mode == Mode.FILTER) { - if (((Boolean) exprDesc.getValue()).booleanValue()) { + if (((Boolean) constantValue).booleanValue()) { return new FilterConstantBooleanVectorExpression(1); } else { return new FilterConstantBooleanVectorExpression(0); } } else { - if (((Boolean) exprDesc.getValue()).booleanValue()) { + if (((Boolean) constantValue).booleanValue()) { return new ConstantVectorExpression(outCol, 1); } else { return new ConstantVectorExpression(outCol, 0); } } - } else { - throw new HiveException("Unsupported constant type: "+type.toString()); } + throw new HiveException("Unsupported constant type: "+type.toString()); } /** @@ -455,8 +619,8 @@ private VectorExpression getIdentityExpression(List childExprList) return expr; } - private VectorExpression getVectorExpressionForUdf(Class udf, List childExpr, Mode mode) - throws HiveException { + private VectorExpression getVectorExpressionForUdf(Class udf, List childExpr, Mode mode, + TypeInfo returnType) throws HiveException { int numChildren = (childExpr == null) ? 
0 : childExpr.size(); if (numChildren > VectorExpressionDescriptor.MAX_NUM_ARGUMENTS) { return null; @@ -483,11 +647,11 @@ private VectorExpression getVectorExpressionForUdf(Class udf, List vectorClass, List childExpr, - Mode childrenMode) throws HiveException { + private VectorExpression createVectorExpression(Class vectorClass, + List childExpr, Mode childrenMode, TypeInfo returnType) throws HiveException { int numChildren = childExpr == null ? 0: childExpr.size(); List children = new ArrayList(); Object[] arguments = new Object[numChildren]; @@ -496,23 +660,23 @@ private VectorExpression createVectorExpression(Class vectorClass, List udf) { return Mode.PROJECTION; } - private VectorExpression instantiateExpression(Class vclass, Object...args) + private VectorExpression instantiateExpression(Class vclass, TypeInfo returnType, Object...args) throws HiveException { + VectorExpression ve = null; Constructor ctor = getConstructor(vclass); int numParams = ctor.getParameterTypes().length; int argsLength = (args == null) ? 0 : args.length; try { if (numParams == 0) { - return (VectorExpression) ctor.newInstance(); + ve = (VectorExpression) ctor.newInstance(); } else if (numParams == argsLength) { - return (VectorExpression) ctor.newInstance(args); + ve = (VectorExpression) ctor.newInstance(args); } else if (numParams == argsLength + 1) { // Additional argument is needed, which is the outputcolumn. - String outType = ((VectorExpression) vclass.newInstance()).getOutputType(); + String outType; + + // Special handling for decimal because decimal types need scale and precision parameter. + // This special handling should be avoided by using returnType uniformly for all cases. 
+ if (returnType != null) { + outType = getNormalizedTypeName(returnType.getTypeName()).toLowerCase(); + } else { + outType = ((VectorExpression) vclass.newInstance()).getOutputType(); + } int outputCol = ocm.allocateOutputColumn(outType); Object [] newArgs = Arrays.copyOf(args, numParams); newArgs[numParams-1] = outputCol; - return (VectorExpression) ctor.newInstance(newArgs); + ve = (VectorExpression) ctor.newInstance(newArgs); + ve.setOutputType(outType); } } catch (Exception ex) { throw new HiveException("Could not instantiate " + vclass.getSimpleName(), ex); } - return null; + return ve; } private VectorExpression getGenericUdfVectorExpression(GenericUDF udf, - List childExpr, Mode mode) throws HiveException { + List childExpr, Mode mode, TypeInfo returnType) throws HiveException { //First handle special cases if (udf instanceof GenericUDFBetween) { return getBetweenFilterExpression(childExpr, mode); @@ -567,10 +741,13 @@ private VectorExpression getGenericUdfVectorExpression(GenericUDF udf, } else if (udf instanceof GenericUDFOPPositive) { return getIdentityExpression(childExpr); } else if (udf instanceof GenericUDFBridge) { - VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr, mode); + VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr, mode, + returnType); if (v != null) { return v; } + } else if (udf instanceof GenericUDFToDecimal) { + return getCastToDecimal(childExpr, returnType); } // Now do a general lookup @@ -580,10 +757,12 @@ private VectorExpression getGenericUdfVectorExpression(GenericUDF udf, } List constantFoldedChildren = foldConstantsForUnaryExprs(childExpr); - VectorExpression ve = getVectorExpressionForUdf(udfClass, constantFoldedChildren, mode); + VectorExpression ve = getVectorExpressionForUdf(udfClass, constantFoldedChildren, mode, returnType); + if (ve == null) { throw new HiveException("Udf: "+udf.getClass().getSimpleName()+", is not supported"); } + return 
ve; } @@ -593,6 +772,7 @@ private VectorExpression getGenericUdfVectorExpression(GenericUDF udf, private VectorExpression getInExpression(List childExpr, Mode mode) throws HiveException { ExprNodeDesc colExpr = childExpr.get(0); + TypeInfo colTypeInfo = colExpr.getTypeInfo(); String colType = colExpr.getTypeString(); // prepare arguments for createVectorExpression @@ -617,7 +797,7 @@ private VectorExpression getInExpression(List childExpr, Mode mode for (int i = 0; i != inVals.length; i++) { inVals[i] = getIntFamilyScalarAsLong((ExprNodeConstantDesc) childrenForInList.get(i)); } - expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION); + expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, colTypeInfo); ((ILongInExpr) expr).setInListValues(inVals); } else if (colType.equals("timestamp")) { cl = (mode == Mode.FILTER ? FilterLongColumnInList.class : LongColumnInList.class); @@ -625,7 +805,7 @@ private VectorExpression getInExpression(List childExpr, Mode mode for (int i = 0; i != inVals.length; i++) { inVals[i] = getTimestampScalar(childrenForInList.get(i)); } - expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION); + expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, colTypeInfo); ((ILongInExpr) expr).setInListValues(inVals); } else if (colType.equals("string")) { cl = (mode == Mode.FILTER ? FilterStringColumnInList.class : StringColumnInList.class); @@ -633,7 +813,7 @@ private VectorExpression getInExpression(List childExpr, Mode mode for (int i = 0; i != inVals.length; i++) { inVals[i] = getStringScalarAsByteArray((ExprNodeConstantDesc) childrenForInList.get(i)); } - expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION); + expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, colTypeInfo); ((IStringInExpr) expr).setInListValues(inVals); } else if (isFloatFamily(colType)) { cl = (mode == Mode.FILTER ? 
FilterDoubleColumnInList.class : DoubleColumnInList.class); @@ -641,7 +821,7 @@ private VectorExpression getInExpression(List childExpr, Mode mode for (int i = 0; i != inValsD.length; i++) { inValsD[i] = getNumericScalarAsDouble(childrenForInList.get(i)); } - expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION); + expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, colTypeInfo); ((IDoubleInExpr) expr).setInListValues(inValsD); } @@ -664,28 +844,91 @@ private VectorExpression getInExpression(List childExpr, Mode mode * descriptor based lookup. */ private VectorExpression getGenericUDFBridgeVectorExpression(GenericUDFBridge udf, - List childExpr, Mode mode) throws HiveException { + List childExpr, Mode mode, TypeInfo returnType) throws HiveException { Class cl = udf.getUdfClass(); if (isCastToIntFamily(cl)) { return getCastToLongExpression(childExpr); } else if (cl.equals(UDFToBoolean.class)) { return getCastToBoolean(childExpr); } else if (isCastToFloatFamily(cl)) { - return getCastToDoubleExpression(cl, childExpr); + return getCastToDoubleExpression(cl, childExpr, returnType); } else if (cl.equals(UDFToString.class)) { return getCastToString(childExpr); } return null; } + private VectorExpression getCastToDecimal(List childExpr, TypeInfo returnType) + throws HiveException { + ExprNodeDesc child = childExpr.get(0); + String inputType = childExpr.get(0).getTypeString(); + if (child instanceof ExprNodeConstantDesc) { + // Return a constant vector expression + Object constantValue = ((ExprNodeConstantDesc) child).getValue(); + Decimal128 decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo()); + return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION); + } + if (isIntFamily(inputType)) { + return createVectorExpression(CastLongToDecimal.class, childExpr, Mode.PROJECTION, returnType); + } else if (isFloatFamily(inputType)) { + return createVectorExpression(CastDoubleToDecimal.class, 
childExpr, Mode.PROJECTION, returnType); + } else if (decimalTypePattern.matcher(inputType).matches()) { + return createVectorExpression(CastDecimalToDecimal.class, childExpr, Mode.PROJECTION, + returnType); + } else if (isStringFamily(inputType)) { + return createVectorExpression(CastStringToDecimal.class, childExpr, Mode.PROJECTION, returnType); + } + throw new HiveException("Unhandled cast input type: " + inputType); + } + + private Decimal128 castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException { + PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type; + String typename = type.getTypeName(); + Decimal128 d = new Decimal128(); + int scale = HiveDecimalUtils.getScaleForType(ptinfo); + switch (ptinfo.getPrimitiveCategory()) { + case FLOAT: + float floatVal = ((Float) scalar).floatValue(); + d.update(floatVal, (short) scale); + break; + case DOUBLE: + double doubleVal = ((Double) scalar).doubleValue(); + d.update(doubleVal, (short) scale); + break; + case BYTE: + byte byteVal = ((Byte) scalar).byteValue(); + d.update(byteVal, (short) scale); + break; + case SHORT: + short shortVal = ((Short) scalar).shortValue(); + d.update(shortVal, (short) scale); + break; + case INT: + int intVal = ((Integer) scalar).intValue(); + d.update(intVal, (short) scale); + break; + case LONG: + long longVal = ((Long) scalar).longValue(); + d.update(longVal, (short) scale); + break; + case DECIMAL: + HiveDecimal decimalVal = (HiveDecimal) scalar; + d.update(decimalVal.unscaledValue(), (short) scale); + break; + default: + throw new HiveException("Unsupported type "+typename+" for cast to Decimal128"); + } + return d; + } + private VectorExpression getCastToString(List childExpr) throws HiveException { String inputType = childExpr.get(0).getTypeString(); if (inputType.equals("boolean")) { // Boolean must come before the integer family. It's a special case. 
- return createVectorExpression(CastBooleanToStringViaLongToString.class, childExpr, Mode.PROJECTION); + return createVectorExpression(CastBooleanToStringViaLongToString.class, childExpr, Mode.PROJECTION, null); } else if (isIntFamily(inputType)) { - return createVectorExpression(CastLongToString.class, childExpr, Mode.PROJECTION); + return createVectorExpression(CastLongToString.class, childExpr, Mode.PROJECTION, null); } /* The string type is deliberately omitted -- the planner removes string to string casts. * Timestamp, float, and double types are handled by the legacy code path. See isLegacyPathUDF. @@ -694,13 +937,14 @@ private VectorExpression getCastToString(List childExpr) throw new HiveException("Unhandled cast input type: " + inputType); } - private VectorExpression getCastToDoubleExpression(Class udf, List childExpr) - throws HiveException { + private VectorExpression getCastToDoubleExpression(Class udf, List childExpr, + TypeInfo returnType) throws HiveException { String inputType = childExpr.get(0).getTypeString(); if (isIntFamily(inputType)) { - return createVectorExpression(CastLongToDouble.class, childExpr, Mode.PROJECTION); + return createVectorExpression(CastLongToDouble.class, childExpr, Mode.PROJECTION, returnType); } else if (inputType.equals("timestamp")) { - return createVectorExpression(CastTimestampToDoubleViaLongToDouble.class, childExpr, Mode.PROJECTION); + return createVectorExpression(CastTimestampToDoubleViaLongToDouble.class, childExpr, Mode.PROJECTION, + returnType); } else if (isFloatFamily(inputType)) { // float types require no conversion, so use a no-op @@ -708,7 +952,7 @@ private VectorExpression getCastToDoubleExpression(Class udf, List childExpr) @@ -718,7 +962,7 @@ private VectorExpression getCastToBoolean(List childExpr) if (inputType.equals("string")) { // string casts to false if it is 0 characters long, otherwise true VectorExpression lenExpr = createVectorExpression(StringLength.class, childExpr, - Mode.PROJECTION); + 
Mode.PROJECTION, null); int outputCol = ocm.allocateOutputColumn("integer"); VectorExpression lenToBoolExpr = @@ -804,7 +1048,7 @@ private VectorExpression getBetweenFilterExpression(List childExpr } } - return createVectorExpression(cl, childrenAfterNot, Mode.PROJECTION); + return createVectorExpression(cl, childrenAfterNot, Mode.PROJECTION, null); } /* @@ -855,6 +1099,7 @@ private VectorExpression getCustomUDFExpression(ExprNodeGenericFuncDesc expr) int outputCol = -1; String resultType = expr.getTypeInfo().getTypeName(); String resultColVectorType = getNormalizedTypeName(resultType); + outputCol = ocm.allocateOutputColumn(resultColVectorType); // Make vectorized operator @@ -901,21 +1146,6 @@ public static boolean isIntFamily(String resultType) { || resultType.equalsIgnoreCase("long"); } - public static String mapJavaTypeToVectorType(String javaType) - throws HiveException { - if (isStringFamily(javaType)) { - return "string"; - } - if (isFloatFamily(javaType)) { - return "double"; - } - if (isIntFamily(javaType) || - isDatetimeFamily(javaType)) { - return "bigint"; - } - throw new HiveException("Unsuported type for vectorization: " + javaType); - } - private Object getScalarValue(ExprNodeConstantDesc constDesc) throws HiveException { if (constDesc.getTypeString().equalsIgnoreCase("String")) { @@ -931,6 +1161,11 @@ private Object getScalarValue(ExprNodeConstantDesc constDesc) } else { return 0; } + } else if (decimalTypePattern.matcher(constDesc.getTypeString()).matches()) { + HiveDecimal hd = (HiveDecimal) constDesc.getValue(); + Decimal128 dvalue = new Decimal128(); + dvalue.update(hd.unscaledValue(), (short) hd.scale()); + return dvalue; } else { return constDesc.getValue(); } @@ -1029,6 +1264,10 @@ static String getNormalizedTypeName(String colType) { normalizedType = "Double"; } else if (colType.equalsIgnoreCase("String")) { normalizedType = "String"; + } else if (decimalTypePattern.matcher(colType.toLowerCase()).matches()) { + + //Return the decimal type 
as is, it includes scale and precision. + normalizedType = colType; } else { normalizedType = "Long"; } @@ -1110,12 +1349,6 @@ public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc) "\" for type: \"" + inputType + ""); } - static Object[][] columnTypes = { - {"Double", DoubleColumnVector.class}, - {"Long", LongColumnVector.class}, - {"String", BytesColumnVector.class}, - }; - public Map getOutputColumnTypeMap() { Map map = new HashMap(); for (int i = 0; i < ocm.outputColCount; i++) { @@ -1129,16 +1362,6 @@ public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc) return columnMap; } - public static ColumnVector allocateColumnVector(String type, int defaultSize) { - if (isFloatFamily(type)) { - return new DoubleColumnVector(defaultSize); - } else if (isStringFamily(type)) { - return new BytesColumnVector(defaultSize); - } else { - return new LongColumnVector(defaultSize); - } - } - public void addToColumnMap(String columnName, int outputColumn) throws HiveException { if (columnMap.containsKey(columnName) && (columnMap.get(columnName) != outputColumn)) { throw new HiveException(String.format("Column %s is already mapped to %d. 
Cannot remap to %d.", @@ -1146,21 +1369,4 @@ public void addToColumnMap(String columnName, int outputColumn) throws HiveExcep } columnMap.put(columnName, outputColumn); } - - public Map getMapVectorExpressions( - Map> expressions) throws HiveException { - Map result = new HashMap(); - if (null != expressions) { - for(T key: expressions.keySet()) { - result.put(key, getVectorExpressions(expressions.get(key))); - } - } - return result; - } - - public void addOutputColumn(String columnName, String columnType) throws HiveException { - String vectorType = mapJavaTypeToVectorType(columnType); - int columnIndex = ocm.allocateOutputColumn(vectorType); - this.addToColumnMap(columnName, columnIndex); - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 6e79979..f48853a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; @@ -370,11 +372,25 @@ private void addScratchColumnsToBatch(VectorizedRowBatch vrb) { } } + private int[] getScalePrecisionFromDecimalType(String decimalType) { + Pattern p = Pattern.compile("\\d+"); + Matcher m = p.matcher(decimalType); + m.find(); + int precision = Integer.parseInt(m.group()); + m.find(); + int scale = Integer.parseInt(m.group()); + int [] precScale = { precision, scale }; + return precScale; + } + private ColumnVector allocateColumnVector(String type, int defaultSize) { if (type.equalsIgnoreCase("double")) { return new DoubleColumnVector(defaultSize); } else if (type.equalsIgnoreCase("string")) { return new 
BytesColumnVector(defaultSize); + } else if (VectorizationContext.decimalTypePattern.matcher(type.toLowerCase()).matches()){ + int [] precisionScale = getScalePrecisionFromDecimalType(type); + return new DecimalColumnVector(defaultSize, precisionScale[0], precisionScale[1]); } else { return new LongColumnVector(defaultSize); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java index 6a7762d..7bbe153 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java @@ -27,6 +27,10 @@ public class CastDecimalToBoolean extends FuncDecimalToLong { private static final long serialVersionUID = 1L; + public CastDecimalToBoolean() { + super(); + } + public CastDecimalToBoolean(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java index 14b91e1..a436fa8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java @@ -40,10 +40,12 @@ public CastDecimalToDecimal(int inputColumn, int outputColumn) { this.inputColumn = inputColumn; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public CastDecimalToDecimal() { super(); + this.outputType = "decimal"; } /** @@ -148,11 +150,6 @@ public void setInputColumn(int inputColumn) { } @Override - public String getOutputType() { - return "decimal"; - } - - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); 
b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java index 2ba1509..9cf97f4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java @@ -25,6 +25,10 @@ private static final long serialVersionUID = 1L; + public CastDecimalToDouble() { + super(); + } + public CastDecimalToDouble(int inputCol, int outputCol) { super(inputCol, outputCol); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java index 65a804d..d5f34d5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java @@ -27,6 +27,10 @@ public class CastDecimalToLong extends FuncDecimalToLong { private static final long serialVersionUID = 1L; + public CastDecimalToLong() { + super(); + } + public CastDecimalToLong(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java index 5b2a658..2e8c3a4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java @@ -28,6 +28,10 @@ private static final long serialVersionUID = 1L; + public CastDecimalToString() { + super(); + } + public CastDecimalToString(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git 
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java index 14e30c3..36a1fcb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java @@ -28,6 +28,10 @@ private static final long serialVersionUID = 1L; + public CastDoubleToDecimal() { + super(); + } + public CastDoubleToDecimal(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java index 1d4d84d..d1a4977 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java @@ -30,6 +30,10 @@ private static final long serialVersionUID = 1L; + public CastLongToDecimal() { + super(); + } + public CastLongToDecimal(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java index 41762ed..7317141 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java @@ -38,10 +38,12 @@ public CastStringToDecimal(int inputColumn, int outputColumn) { this.inputColumn = inputColumn; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public CastStringToDecimal() { super(); + this.outputType = "decimal"; } /** @@ -152,11 +154,6 @@ public void setInputColumn(int inputColumn) { } @Override - public String getOutputType() { - return "decimal"; - 
} - - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java index 37e92e1..0f9874b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java @@ -28,6 +28,10 @@ private static final long serialVersionUID = 1L; + public CastTimestampToDecimal() { + super(); + } + public CastTimestampToDecimal(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java index cac1d80..901005e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java @@ -18,7 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; /** * Constant is represented as a vector with repeating values. 
@@ -30,14 +32,15 @@ private static enum Type { LONG, DOUBLE, - BYTES + BYTES, + DECIMAL } private int outputColumn; protected long longValue = 0; private double doubleValue = 0; private byte[] bytesValue = null; - private String typeString; + private Decimal128 decimalValue = null; private Type type; private int bytesValueLength = 0; @@ -67,6 +70,11 @@ public ConstantVectorExpression(int outputColumn, byte[] value) { setBytesValue(value); } + public ConstantVectorExpression(int outputColumn, Decimal128 value) { + this(outputColumn, "decimal"); + setDecimalValue(value); + } + private void evaluateLong(VectorizedRowBatch vrg) { LongColumnVector cv = (LongColumnVector) vrg.cols[outputColumn]; cv.isRepeating = true; @@ -88,6 +96,13 @@ private void evaluateBytes(VectorizedRowBatch vrg) { cv.setRef(0, bytesValue, 0, bytesValueLength); } + private void evaluateDecimal(VectorizedRowBatch vrg) { + DecimalColumnVector dcv = (DecimalColumnVector) vrg.cols[outputColumn]; + dcv.isRepeating = true; + dcv.noNulls = true; + dcv.vector[0] = decimalValue; + } + @Override public void evaluate(VectorizedRowBatch vrg) { switch (type) { @@ -100,6 +115,9 @@ public void evaluate(VectorizedRowBatch vrg) { case BYTES: evaluateBytes(vrg); break; + case DECIMAL: + evaluateDecimal(vrg); + break; } } @@ -138,16 +156,22 @@ public void setBytesValue(byte[] bytesValue) { this.bytesValueLength = bytesValue.length; } + public void setDecimalValue(Decimal128 decimalValue) { + this.decimalValue = decimalValue; + } + public String getTypeString() { - return typeString; + return getOutputType(); } public void setTypeString(String typeString) { - this.typeString = typeString; + this.outputType = typeString; if ("string".equalsIgnoreCase(typeString)) { this.type = Type.BYTES; } else if ("double".equalsIgnoreCase(typeString)) { this.type = Type.DOUBLE; + } else if (VectorizationContext.decimalTypePattern.matcher(typeString).matches()){ + this.type = Type.DECIMAL; } else { this.type = Type.LONG; } @@ -166,6 
+190,11 @@ public void setType(Type type) { } @Override + public void setOutputType(String type) { + setTypeString(type); + } + + @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()).build(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java index 8b2a6f0..b79e5a4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java @@ -36,10 +36,12 @@ public FuncDoubleToDecimal(int inputColumn, int outputColumn) { this.inputColumn = inputColumn; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public FuncDoubleToDecimal() { super(); + this.outputType = "decimal"; } abstract protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i); @@ -130,11 +132,6 @@ public void setInputColumn(int inputColumn) { } @Override - public String getOutputType() { - return "decimal"; - } - - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java index 18d1dbb..770c117 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java @@ -36,10 +36,12 @@ public FuncLongToDecimal(int inputColumn, int outputColumn) { this.inputColumn = inputColumn; this.outputColumn = outputColumn; + this.outputType = "decimal"; } public FuncLongToDecimal() { super(); + this.outputType = "decimal"; } 
abstract protected void func(DecimalColumnVector outV, LongColumnVector inV, int i); @@ -130,11 +132,6 @@ public void setInputColumn(int inputColumn) { } @Override - public String getOutputType() { - return "decimal"; - } - - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java index d00d99b..6dac109 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java @@ -35,6 +35,11 @@ protected VectorExpression [] childExpressions = null; /** + * Output type of the expression. + */ + protected String outputType; + + /** * This is the primary method to implement expression logic. * @param batch */ @@ -50,7 +55,16 @@ /** * Returns type of the output column. */ - public abstract String getOutputType(); + public String getOutputType() { + return outputType; + } + + /** + * Set type of the output column. + */ + public void setOutputType(String type) { + this.outputType = type; + } /** * Initialize the child expressions. 
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 85ac533..21fe8ca 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -23,11 +23,9 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import java.util.regex.Pattern; @@ -122,45 +120,7 @@ import org.apache.hadoop.hive.ql.udf.UDFToString; import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; import org.apache.hadoop.hive.ql.udf.UDFYear; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCeil; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFloor; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLTrim; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDivide; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan; -import 
org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPositive; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPower; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPosMod; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRTrim; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrim; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen; +import org.apache.hadoop.hive.ql.udf.generic.*; public class Vectorizer implements PhysicalPlanResolver { @@ -281,6 +241,7 @@ public Vectorizer() { supportedGenericUDFs.add(UDFToDouble.class); supportedGenericUDFs.add(UDFToString.class); supportedGenericUDFs.add(GenericUDFTimestamp.class); + supportedGenericUDFs.add(GenericUDFToDecimal.class); // For conditional expressions supportedGenericUDFs.add(GenericUDFIf.class); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java index 4f59125..d274531 100755 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java 
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -39,7 +40,8 @@ * UDFToByte. * */ -@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class}) +@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class, + CastDecimalToLong.class}) public class UDFToByte extends UDF { private final ByteWritable byteWritable = new ByteWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java index e4dfcc9..8084537 100755 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -37,7 +38,8 @@ * UDFToDouble. 
* */ -@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class}) +@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class, + CastDecimalToDouble.class}) public class UDFToDouble extends UDF { private final DoubleWritable doubleWritable = new DoubleWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java index 4e2d1d4..129da43 100755 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -38,7 +39,8 @@ * UDFToFloat. 
* */ -@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class}) +@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class, + CastDecimalToDouble.class}) public class UDFToFloat extends UDF { private final FloatWritable floatWritable = new FloatWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java index 6f9746c..789c780 100755 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -39,7 +40,8 @@ * UDFToInteger. 
* */ -@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class}) +@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class, + CastDecimalToLong.class}) public class UDFToInteger extends UDF { private final IntWritable intWritable = new IntWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java index e794e92..04ff7cf 100755 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -39,7 +40,8 @@ * UDFToLong. 
* */ -@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class}) +@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class, + CastDecimalToLong.class}) public class UDFToLong extends UDF { private final LongWritable longWritable = new LongWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java index 4e64d47..5315552 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java @@ -20,6 +20,8 @@ import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -39,7 +41,8 @@ * UDFToShort. 
* */ -@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class}) +@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class, + CastDecimalToLong.class}) public class UDFToShort extends UDF { ShortWritable shortWritable = new ShortWritable(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java index 9a04e81..96f970e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java @@ -24,15 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn; import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongScalar; import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarDivideLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarDivideDoubleColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; @@ -51,7 +43,9 @@ 
LongColDivideLongScalar.class, LongColDivideDoubleScalar.class, DoubleColDivideLongScalar.class, DoubleColDivideDoubleScalar.class, LongScalarDivideLongColumn.class, LongScalarDivideDoubleColumn.class, - DoubleScalarDivideLongColumn.class, DoubleScalarDivideDoubleColumn.class}) + DoubleScalarDivideLongColumn.class, DoubleScalarDivideDoubleColumn.class, + DecimalColDivideDecimalColumn.class, DecimalColDivideDecimalScalar.class, + DecimalScalarDivideDecimalColumn.class}) public class GenericUDFOPDivide extends GenericUDFBaseNumeric { public GenericUDFOPDivide() { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java index 3479b13..cf104d3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java @@ -20,36 +20,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColEqualLongScalar; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -71,7 +42,9 @@ FilterLongColEqualLongScalar.class, FilterLongColEqualDoubleScalar.class, FilterDoubleColEqualLongScalar.class, FilterDoubleColEqualDoubleScalar.class, FilterLongScalarEqualLongColumn.class, FilterLongScalarEqualDoubleColumn.class, - FilterDoubleScalarEqualLongColumn.class, FilterDoubleScalarEqualDoubleColumn.class}) + FilterDoubleScalarEqualLongColumn.class, FilterDoubleScalarEqualDoubleColumn.class, + FilterDecimalColEqualDecimalColumn.class, FilterDecimalColEqualDecimalScalar.class, + FilterDecimalScalarEqualDecimalColumn.class}) public class GenericUDFOPEqual extends GenericUDFBaseCompare { public GenericUDFOPEqual(){ this.opName = "EQUAL"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java index edb1bf8..9f8de39 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java @@ -20,36 +20,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterEqualDoubleColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarGreaterEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarGreaterEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarGreaterEqualLongColumn; 
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterEqualStringColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.io.Text; @@ -72,7 +43,9 @@ FilterLongColGreaterEqualLongScalar.class, FilterLongColGreaterEqualDoubleScalar.class, FilterDoubleColGreaterEqualLongScalar.class, FilterDoubleColGreaterEqualDoubleScalar.class, FilterLongScalarGreaterEqualLongColumn.class, FilterLongScalarGreaterEqualDoubleColumn.class, - FilterDoubleScalarGreaterEqualLongColumn.class, FilterDoubleScalarGreaterEqualDoubleColumn.class}) + FilterDoubleScalarGreaterEqualLongColumn.class, FilterDoubleScalarGreaterEqualDoubleColumn.class, + FilterDecimalColGreaterEqualDecimalColumn.class, FilterDecimalColGreaterEqualDecimalScalar.class, + FilterDecimalScalarGreaterEqualDecimalColumn.class}) public class GenericUDFOPEqualOrGreaterThan extends GenericUDFBaseCompare { public GenericUDFOPEqualOrGreaterThan(){ this.opName = "EQUAL OR GREATER THAN"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java index 06d9647..b6d4d56 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java @@ -20,36 +20,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColLessEqualDoubleColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColLessEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColLessEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColLessEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColLessEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarLessEqualStringColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColLessEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColLessEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarLessEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarLessEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarLessEqualStringColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.io.Text; @@ -72,7 +43,9 @@ FilterLongColLessEqualLongScalar.class, FilterLongColLessEqualDoubleScalar.class, FilterDoubleColLessEqualLongScalar.class, FilterDoubleColLessEqualDoubleScalar.class, FilterLongScalarLessEqualLongColumn.class, FilterLongScalarLessEqualDoubleColumn.class, - FilterDoubleScalarLessEqualLongColumn.class, FilterDoubleScalarLessEqualDoubleColumn.class}) + FilterDoubleScalarLessEqualLongColumn.class, FilterDoubleScalarLessEqualDoubleColumn.class, + FilterDecimalColLessEqualDecimalColumn.class, FilterDecimalColLessEqualDecimalScalar.class, + FilterDecimalScalarLessEqualDecimalColumn.class}) public class GenericUDFOPEqualOrLessThan extends GenericUDFBaseCompare { public GenericUDFOPEqualOrLessThan(){ this.opName = "EQUAL OR LESS THAN"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java index 
28bce88..3ef7b44 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java @@ -20,36 +20,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColGreaterLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColGreaterLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterDoubleColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarGreaterStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarGreaterDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarGreaterLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterStringColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.io.Text; @@ -72,7 +43,9 @@ FilterLongColGreaterLongScalar.class, FilterLongColGreaterDoubleScalar.class, FilterDoubleColGreaterLongScalar.class, FilterDoubleColGreaterDoubleScalar.class, FilterLongScalarGreaterLongColumn.class, FilterLongScalarGreaterDoubleColumn.class, - FilterDoubleScalarGreaterLongColumn.class, FilterDoubleScalarGreaterDoubleColumn.class}) + FilterDoubleScalarGreaterLongColumn.class, FilterDoubleScalarGreaterDoubleColumn.class, + FilterDecimalColGreaterDecimalColumn.class, FilterDecimalColGreaterDecimalScalar.class, + 
FilterDecimalScalarGreaterDecimalColumn.class}) public class GenericUDFOPGreaterThan extends GenericUDFBaseCompare { public GenericUDFOPGreaterThan(){ this.opName = "GREATER THAN"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java index 9258b43..27c983e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java @@ -43,7 +43,9 @@ FilterLongColLessLongScalar.class, FilterLongColLessDoubleScalar.class, FilterDoubleColLessLongScalar.class, FilterDoubleColLessDoubleScalar.class, FilterLongScalarLessLongColumn.class, FilterLongScalarLessDoubleColumn.class, - FilterDoubleScalarLessLongColumn.class, FilterDoubleScalarLessDoubleColumn.class}) + FilterDoubleScalarLessLongColumn.class, FilterDoubleScalarLessDoubleColumn.class, + FilterDecimalColLessDecimalColumn.class, FilterDecimalColLessDecimalScalar.class, + FilterDecimalScalarLessDecimalColumn.class}) public class GenericUDFOPLessThan extends GenericUDFBaseCompare { public GenericUDFOPLessThan(){ this.opName = "LESS THAN"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java index 6ee6f39..3eb605a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java @@ -21,18 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleScalar; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractLongColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -49,7 +38,9 @@ LongColSubtractLongScalar.class, LongColSubtractDoubleScalar.class, DoubleColSubtractLongScalar.class, DoubleColSubtractDoubleScalar.class, LongScalarSubtractLongColumn.class, LongScalarSubtractDoubleColumn.class, - DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class}) + DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class, + DecimalColSubtractDecimalColumn.class, DecimalColSubtractDecimalScalar.class, + DecimalScalarSubtractDecimalColumn.class}) public class GenericUDFOPMinus extends GenericUDFBaseNumeric { public GenericUDFOPMinus() { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java index e7a2a8d..7dc1f83 100644 ---
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java @@ -21,18 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyLongColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -49,7 +38,9 @@ LongColMultiplyLongScalar.class, LongColMultiplyDoubleScalar.class, DoubleColMultiplyLongScalar.class, DoubleColMultiplyDoubleScalar.class, LongScalarMultiplyLongColumn.class, LongScalarMultiplyDoubleColumn.class, - DoubleScalarMultiplyLongColumn.class, 
DoubleScalarMultiplyDoubleColumn.class}) + DoubleScalarMultiplyLongColumn.class, DoubleScalarMultiplyDoubleColumn.class, + DecimalColMultiplyDecimalColumn.class, DecimalColMultiplyDecimalScalar.class, + DecimalScalarMultiplyDecimalColumn.class}) public class GenericUDFOPMultiply extends GenericUDFBaseNumeric { public GenericUDFOPMultiply() { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java index 4c11e5b..d604cd5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java @@ -20,36 +20,7 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColNotEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColNotEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColNotEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColNotEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleScalarNotEqualLongColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColNotEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColNotEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColNotEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColNotEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarNotEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColNotEqualDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColNotEqualLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarNotEqualDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarNotEqualLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColNotEqualStringColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColNotEqualStringScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarNotEqualStringColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -71,7 +42,9 @@ FilterLongColNotEqualLongScalar.class, FilterLongColNotEqualDoubleScalar.class, 
FilterDoubleColNotEqualLongScalar.class, FilterDoubleColNotEqualDoubleScalar.class, FilterLongScalarNotEqualLongColumn.class, FilterLongScalarNotEqualDoubleColumn.class, - FilterDoubleScalarNotEqualLongColumn.class, FilterDoubleScalarNotEqualDoubleColumn.class}) + FilterDoubleScalarNotEqualLongColumn.class, FilterDoubleScalarNotEqualDoubleColumn.class, + FilterDecimalColNotEqualDecimalColumn.class, FilterDecimalColNotEqualDecimalScalar.class, + FilterDecimalScalarNotEqualDecimalColumn.class}) public class GenericUDFOPNotEqual extends GenericUDFBaseCompare { public GenericUDFOPNotEqual(){ this.opName = "NOT EQUAL"; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java index 26ac65c..2721e6b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java @@ -21,18 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddLongColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongColumn; -import 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongScalar; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddDoubleColumn; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddLongColumn; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -57,7 +46,8 @@ DoubleColAddLongColumn.class, DoubleColAddDoubleColumn.class, LongColAddLongScalar.class, LongColAddDoubleScalar.class, DoubleColAddLongScalar.class, DoubleColAddDoubleScalar.class, LongScalarAddLongColumn.class, LongScalarAddDoubleColumn.class, DoubleScalarAddLongColumn.class, - DoubleScalarAddDoubleColumn.class}) + DoubleScalarAddDoubleColumn.class, DecimalScalarAddDecimalColumn.class, DecimalColAddDecimalColumn.class, + DecimalColAddDecimalScalar.class}) public class GenericUDFOPPlus extends GenericUDFBaseNumeric { public GenericUDFOPPlus() { diff --git ql/src/test/queries/clientpositive/vector_decimal_expressions.q ql/src/test/queries/clientpositive/vector_decimal_expressions.q new file mode 100644 index 0000000..f3b4c83 --- /dev/null +++ ql/src/test/queries/clientpositive/vector_decimal_expressions.q @@ -0,0 +1,4 @@ +CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc; +SET hive.vectorized.execution.enabled=true; +EXPLAIN SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)) from decimal_test where cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdouble IS NOT NULL LIMIT 10; +SELECT cdecimal1 + cdecimal2, cdecimal1 - (2*cdecimal2), ((cdecimal1+2.34)/cdecimal2), (cdecimal1 * (cdecimal2/3.4)) from decimal_test where cdecimal1 > 0 AND cdecimal1 < 
12345.5678 AND cdecimal2 != 0 AND cdouble IS NOT NULL LIMIT 10; diff --git ql/src/test/results/clientpositive/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/vector_decimal_expressions.q.out new file mode 100644 index 0000000..a4be3e7 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_expressions.q.out @@ -0,0 +1,71 @@ +PREHOOK: query: CREATE TABLE date_decimal_test STORED AS ORC AS SELECT cint, cdouble, CAST (CAST (cint AS TIMESTAMP) AS DATE) AS cdate, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal FROM alltypesorc +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@alltypesorc +POSTHOOK: query: CREATE TABLE date_decimal_test STORED AS ORC AS SELECT cint, cdouble, CAST (CAST (cint AS TIMESTAMP) AS DATE) AS cdate, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal FROM alltypesorc +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@date_decimal_test +PREHOOK: query: EXPLAIN SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME date_decimal_test))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL cdate)) (TOK_SELEXPR (TOK_TABLE_OR_COL cdecimal))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL cint)) (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL cdouble)))) (TOK_LIMIT 10))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + date_decimal_test + TableScan + alias: date_decimal_test + Filter Operator + predicate: + expr: (cint is not null and cdouble is not null) + type: boolean + Vectorized execution: true + 
Select Operator + expressions: + expr: cdate + type: date + expr: cdecimal + type: decimal(20,10) + outputColumnNames: _col0, _col1 + Vectorized execution: true + Limit + Vectorized execution: true + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Vectorized execution: true + + Stage: Stage-0 + Fetch Operator + limit: 10 + +PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@date_decimal_test +#### A masked pattern was here #### +POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@date_decimal_test +#### A masked pattern was here #### +1970-01-06 -7959.5837837838 +1970-01-06 -2516.4135135135 +1970-01-06 -9445.0621621622 +1970-01-06 -5713.7459459459 +1970-01-06 8963.6405405405 +1970-01-06 4193.6243243243 +1970-01-06 2964.3864864865 +1970-01-06 -4673.2540540541 +1970-01-06 -9216.8945945946 +1970-01-06 -9287.3756756757