Index: ql/src/test/results/clientnegative/compare_string_bigint.q.out
===================================================================
--- ql/src/test/results/clientnegative/compare_string_bigint.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/compare_string_bigint.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments ''1'': In strict mode, comparing bigints and strings is not allowed, it may result in a loss of precision. If you really want to perform the operation, set hive.mapred.mode=nonstrict
Index: ql/src/test/results/clientnegative/compare_double_bigint.q.out
===================================================================
--- ql/src/test/results/clientnegative/compare_double_bigint.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/compare_double_bigint.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '1.0': In strict mode, comparing bigints and doubles is not allowed, it may result in a loss of precision. If you really want to perform the operation, set hive.mapred.mode=nonstrict
Index: ql/src/test/queries/clientnegative/compare_double_bigint.q
===================================================================
--- ql/src/test/queries/clientnegative/compare_double_bigint.q	(revision 0)
+++ ql/src/test/queries/clientnegative/compare_double_bigint.q	(revision 0)
@@ -0,0 +1,5 @@
+set hive.mapred.mode=strict;
+
+-- This should fail until we fix the issue with precision when casting a bigint to a double
+
+select * from src where cast(1 as bigint) = 1.0 limit 10;
\ No newline at end of file
Index: ql/src/test/queries/clientnegative/compare_string_bigint.q
===================================================================
--- ql/src/test/queries/clientnegative/compare_string_bigint.q	(revision 0)
+++ ql/src/test/queries/clientnegative/compare_string_bigint.q	(revision 0)
@@ -0,0 +1,5 @@
+set hive.mapred.mode=strict;
+
+--This should fail until we fix the issue with precision when casting a bigint to a double
+
+select * from src where cast(1 as bigint) = '1' limit 10;
\ No newline at end of file
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(revision 1157946)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java	(working copy)
@@ -180,6 +180,12 @@
   INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "),
   EXIM_FOR_NON_NATIVE("Export/Import cannot be done for a non-native table. "),
   INSERT_INTO_BUCKETIZED_TABLE("Bucketized tables do not support INSERT INTO:"),
+  NO_COMPARE_BIGINT_STRING("In strict mode, comparing bigints and strings is not allowed, " +
+      "it may result in a loss of precision. " +
+      "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
+  NO_COMPARE_BIGINT_DOUBLE("In strict mode, comparing bigints and doubles is not allowed, " +
+      "it may result in a loss of precision. " +
+      "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
   ;

   private String mesg;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java	(revision 1157946)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java	(working copy)
@@ -18,14 +18,19 @@

 package org.apache.hadoop.hive.ql.udf.generic;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
@@ -51,6 +56,8 @@
     COMPARE_BOOL, SAME_TYPE, NEED_CONVERT
   }

+  static final private Log LOG = LogFactory.getLog(GenericUDFBaseCompare.class.getName());
+
   protected String opName;
   protected String opDisplayName;

@@ -134,6 +141,28 @@

     if (oiTypeInfo0 != oiTypeInfo1) {
       compareType = CompareType.NEED_CONVERT;
+      HiveConf conf = null;
+      if (SessionState.get() != null) {
+        conf = SessionState.get().getConf();
+      }
+
+      // For now, if a bigint is going to be cast to a double throw an error or warning
+      if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
+          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
+        if (conf != null && conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
+          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_STRING.getMsg());
+        } else {
+          LOG.warn("Comparing a bigint and a string may result in a loss of precision.");
+        }
+      } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
+          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
+        if (conf != null && conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
+          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_DOUBLE.getMsg());
+        } else {
+          LOG.warn("Comparing a bigint and a double may result in a loss of precision.");
+        }
+      }
+
       // If either argument is a string, we convert to a double because a number
       // in string form should always be convertible into a double
       if (oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo)