diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 371cb0f..733f860 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -905,7 +905,11 @@ // none is the default(past) behavior. Implies only alphaNumeric and underscore are valid characters in identifiers. // column: implies column names can contain any character. HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "column", - new PatternValidator("none", "column")) + new PatternValidator("none", "column")), + + // Enable more ANSI SQL compliant behavior + // This will change the numeric result type of arithmetic operators to be more SQL compliant. + HIVE_SQL_ANSI("hive.sql.ansi", false) ; public final String varname; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java index 28b5ffd..f3f5a3a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java @@ -22,12 +22,14 @@ import java.util.List; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -40,6 +42,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; @@ -203,13 +206,53 @@ private PrimitiveTypeInfo deriveResultTypeInfo() throws UDFArgumentException { // If any of the type isn't exact, double is chosen. if (!FunctionRegistry.isExactNumericType(left) || !FunctionRegistry.isExactNumericType(right)) { - return TypeInfoFactory.doubleTypeInfo; + return deriveResultApproxTypeInfo(); } return deriveResultExactTypeInfo(); } /** + * Default implementation for getting the approximate type info for the operator result. + * Divide operator overrides this. 
+ * @return the approximate (floating-point or backward-compatible) numeric type info for the operator result + */ + protected PrimitiveTypeInfo deriveResultApproxTypeInfo() { + if (SessionState.get().getConf().getBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI)) { + return deriveResultApproxTypeInfoAnsiSql(); + } + return deriveResultApproxTypeInfoBackwardCompat(); + } + + protected PrimitiveTypeInfo deriveResultApproxTypeInfoAnsiSql() { + return TypeInfoFactory.doubleTypeInfo; + } + + protected PrimitiveTypeInfo deriveResultApproxTypeInfoBackwardCompat() { + PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI); + PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI); + + // string types get converted to double + if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(left.getPrimitiveCategory()) + == PrimitiveGrouping.STRING_GROUP) { + left = TypeInfoFactory.doubleTypeInfo; + } + if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(right.getPrimitiveCategory()) + == PrimitiveGrouping.STRING_GROUP) { + right = TypeInfoFactory.doubleTypeInfo; + } + + // Use type promotion + PrimitiveCategory commonCat = FunctionRegistry.getCommonCategory(left, right); + if (commonCat == PrimitiveCategory.DECIMAL) { + // Hive 0.12 behavior where double * decimal -> decimal is gone. + return TypeInfoFactory.doubleTypeInfo; + } else { + return left.getPrimitiveCategory() == commonCat ? left : right; + } + } + + /** + * Default implementation for getting the exact type info for the operator result. It worked for all * but divide operator. 
* diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java index 9a04e81..8d8ab29 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn; @@ -33,11 +34,14 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleColumn; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleScalar; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarDivideDoubleColumn; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; /** * Note that in SQL, the return type of divide is not necessarily the same @@ -61,10 +65,34 @@ public GenericUDFOPDivide() { @Override protected PrimitiveTypeInfo deriveResultExactTypeInfo() { + if (SessionState.get().getConf().getBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI)) { + return deriveResultExactTypeInfoAnsiSql(); + } + return deriveResultExactTypeInfoBackwardsCompat(); + } + + protected PrimitiveTypeInfo deriveResultExactTypeInfoAnsiSql() { // No type promotion. 
Everything goes to decimal. return deriveResultDecimalTypeInfo(); } + protected PrimitiveTypeInfo deriveResultExactTypeInfoBackwardsCompat() { + // Preserve existing return type behavior for division: + // Non-decimal division should return double + if (leftOI.getPrimitiveCategory() != PrimitiveCategory.DECIMAL + && rightOI.getPrimitiveCategory() != PrimitiveCategory.DECIMAL) { + return TypeInfoFactory.doubleTypeInfo; + } + + return deriveResultDecimalTypeInfo(); + } + + @Override + protected PrimitiveTypeInfo deriveResultApproxTypeInfoBackwardCompat() { + // Hive 0.12 behavior where double / decimal -> decimal is gone. + return TypeInfoFactory.doubleTypeInfo; + } + @Override protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) { if (right.get() == 0.0) { diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java index e894b54..8b1c86a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -30,6 +32,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; @@ -37,7 +40,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPDivide { +public class TestGenericUDFOPDivide extends TestGenericUDFOPNumeric { @Test public void testByteDivideShort() throws HiveException { @@ -248,4 +251,44 @@ private void testDecimalDivisionResultType(int prec1, int scale1, int prec2, int Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(prec3, scale3), oi.getTypeInfo()); } + @Test + public void testReturnTypeBackwardCompat() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, false); + + verifyReturnType(new GenericUDFOPDivide(), "int", "int", "double"); // different from sql compat mode + verifyReturnType(new GenericUDFOPDivide(), "int", "float", "double"); + verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)"); + + verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double"); + verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)"); + + // Most tests are done with ANSI SQL mode enabled, set it back to true + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + } + + @Test + public void testReturnTypeAnsiSql() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + + verifyReturnType(new GenericUDFOPDivide(), "int", "int", "decimal(21,11)"); + verifyReturnType(new GenericUDFOPDivide(), 
"int", "float", "double"); + verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)"); + + verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double"); + verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)"); + } } diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index d7285d9..2f77ccc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -37,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMinus { +public class TestGenericUDFOPMinus extends TestGenericUDFOPNumeric { @Test public void testByteMinusShort() throws HiveException { @@ -201,4 +203,44 @@ public void testDecimalMinusDecimalSameParams() throws HiveException { Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), oi.getTypeInfo()); } + @Test + public void testReturnTypeBackwardCompat() throws 
Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, false); + + verifyReturnType(new GenericUDFOPMinus(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPMinus(), "int", "float", "float"); // different from sql compat mode + verifyReturnType(new GenericUDFOPMinus(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "int", "decimal(10,2)", "decimal(13,2)"); + + verifyReturnType(new GenericUDFOPMinus(), "float", "float", "float"); // different from sql compat mode + verifyReturnType(new GenericUDFOPMinus(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMinus(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMinus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); + + // Most tests are done with ANSI SQL mode enabled, set it back to true + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + } + + @Test + public void testReturnTypeAnsiSql() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + + verifyReturnType(new GenericUDFOPMinus(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPMinus(), "int", "float", "double"); + verifyReturnType(new GenericUDFOPMinus(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "int", "decimal(10,2)", "decimal(13,2)"); + + verifyReturnType(new GenericUDFOPMinus(), "float", "float", "double"); + verifyReturnType(new GenericUDFOPMinus(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMinus(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPMinus(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMinus(), 
"decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); + } } diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java index ef17eb5..abc0069 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java @@ -36,7 +36,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMod { +public class TestGenericUDFOPMod extends TestGenericUDFOPNumeric { @Test public void testModByZero1() throws HiveException { diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java index 56472f0..a8521d6 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -37,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPMultiply { +public class TestGenericUDFOPMultiply extends TestGenericUDFOPNumeric { @Test public void testByteTimesShort() throws HiveException { @@ -200,4 +202,45 @@ public void testDecimalTimesDecimalSameParams() throws HiveException { Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11, 4), oi.getTypeInfo()); } + + @Test + public void testReturnTypeBackwardCompat() throws Exception { + 
SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, false); + + verifyReturnType(new GenericUDFOPMultiply(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPMultiply(), "int", "float", "float"); // different from sql compat mode + verifyReturnType(new GenericUDFOPMultiply(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "int", "decimal(10,2)", "decimal(21,2)"); + + verifyReturnType(new GenericUDFOPMultiply(), "float", "float", "float"); // different from sql compat mode + verifyReturnType(new GenericUDFOPMultiply(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMultiply(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMultiply(), "decimal(10,2)", "decimal(10,2)", "decimal(21,4)"); + + // Most tests are done with ANSI SQL mode enabled, set it back to true + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + } + + @Test + public void testReturnTypeAnsiSql() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + + verifyReturnType(new GenericUDFOPMultiply(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPMultiply(), "int", "float", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "int", "decimal(10,2)", "decimal(21,2)"); + + verifyReturnType(new GenericUDFOPMultiply(), "float", "float", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPMultiply(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPMultiply(), "double", "decimal(10,2)", "double"); + + 
verifyReturnType(new GenericUDFOPMultiply(), "decimal(10,2)", "decimal(10,2)", "decimal(21,4)"); + } } diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java new file mode 100644 index 0000000..de73a48 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java @@ -0,0 +1,38 @@ +package org.apache.hadoop.hive.ql.udf.generic; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.junit.Assert; + +public abstract class TestGenericUDFOPNumeric { + public TestGenericUDFOPNumeric() { + // Arithmetic operations rely on getting conf from SessionState, need to initialize here. + SessionState ss = new SessionState(new HiveConf()); + ss.getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + SessionState.setCurrentSessionState(ss); + } + + protected void verifyReturnType(GenericUDF udf, + String typeStr1, String typeStr2, String expectedTypeStr) throws HiveException { + // Lookup type infos for our input types and expected return type + PrimitiveTypeInfo type1 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr1); + PrimitiveTypeInfo type2 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr2); + PrimitiveTypeInfo expectedType = TypeInfoFactory.getPrimitiveTypeInfo(expectedTypeStr); + + // Initialize UDF which will output the return type for the UDF. 
+ ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type1), + PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type2) + }; + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + + Assert.assertEquals("Return type for " + udf.getDisplayString(new String[] {typeStr1, typeStr2}), + expectedType, oi.getTypeInfo()); + } +} diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index 6193257..1351a9c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -37,7 +39,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFOPPlus { +public class TestGenericUDFOPPlus extends TestGenericUDFOPNumeric { @Test public void testBytePlusShort() throws HiveException { @@ -207,4 +209,44 @@ public void testDecimalPlusDecimalSameParams() throws HiveException { Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), oi.getTypeInfo()); } + @Test + public void testReturnTypeBackwardCompat() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, false); + + verifyReturnType(new GenericUDFOPPlus(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPPlus(), "int", "float", "float"); // different from sql compat mode + 
verifyReturnType(new GenericUDFOPPlus(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "int", "decimal(10,2)", "decimal(13,2)"); + + verifyReturnType(new GenericUDFOPPlus(), "float", "float", "float"); // different from sql compat mode + verifyReturnType(new GenericUDFOPPlus(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPPlus(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); + + // Most tests are done with ANSI SQL mode enabled, set it back to true + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + } + + @Test + public void testReturnTypeAnsiSql() throws Exception { + SessionState.get().getConf().setBoolVar(HiveConf.ConfVars.HIVE_SQL_ANSI, true); + + verifyReturnType(new GenericUDFOPPlus(), "int", "int", "int"); + verifyReturnType(new GenericUDFOPPlus(), "int", "float", "double"); + verifyReturnType(new GenericUDFOPPlus(), "int", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "int", "decimal(10,2)", "decimal(13,2)"); + + verifyReturnType(new GenericUDFOPPlus(), "float", "float", "double"); + verifyReturnType(new GenericUDFOPPlus(), "float", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "float", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPPlus(), "double", "double", "double"); + verifyReturnType(new GenericUDFOPPlus(), "double", "decimal(10,2)", "double"); + + verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); + } } diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java index 702e3e7..30f04d9 100644 --- 
ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java @@ -36,7 +36,7 @@ import org.junit.Assert; import org.junit.Test; -public class TestGenericUDFPosMod { +public class TestGenericUDFPosMod extends TestGenericUDFOPNumeric { @Test public void testPosModByZero1() throws HiveException {