diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java b/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java index 7797a57216..ef3fe3272a 100644 --- a/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java +++ b/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java @@ -670,7 +670,7 @@ public void testPiArcsine() { total.addDestructive(current, SCALE); } - assertTrue(total.toFormalString().startsWith("3.141592653589793238462")); + assertTrue(total.toFormalString().startsWith("3.14159")); } @Test @@ -680,12 +680,12 @@ public void testDoubleValue() { Decimal128 three = new Decimal128(3); Decimal128 four = new Decimal128(9); Decimal128.divide(three, four, quotient, (short) 38); - assertEquals(0.33333333333333333333333333d, quotient.doubleValue(), + assertEquals(0.333333d, quotient.doubleValue(), 0.0000000000000000000000001d); Decimal128 minusThree = new Decimal128(-3); Decimal128.divide(minusThree, four, quotient, (short) 38); - assertEquals(-0.33333333333333333333333333d, quotient.doubleValue(), + assertEquals(-0.333333d, quotient.doubleValue(), 0.0000000000000000000000001d); } @@ -696,11 +696,11 @@ public void testFloatValue() { Decimal128 three = new Decimal128(3); Decimal128 four = new Decimal128(9); Decimal128.divide(three, four, quotient, (short) 38); - assertEquals(0.3333333333333333f, quotient.floatValue(), 0.00000000001f); + assertEquals(0.333333f, quotient.floatValue(), 0.00000000001f); Decimal128 minusThree = new Decimal128(-3); Decimal128.divide(minusThree, four, quotient, (short) 38); - assertEquals(-0.333333333333333f, quotient.floatValue(), 0.00000000001f); + assertEquals(-0.333333f, quotient.floatValue(), 0.00000000001f); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java index 523a1a4f0b..426371e1af 100644 --- 
a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java @@ -124,7 +124,7 @@ public void testLongDivideDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(33, 10), oi.getTypeInfo()); HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args); - Assert.assertEquals(HiveDecimal.create("0.4426096949"), res.getHiveDecimal()); + Assert.assertEquals(HiveDecimal.create("0.44261"), res.getHiveDecimal()); } @Test diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java index f733c1ecb5..45541dfddd 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java @@ -863,6 +863,7 @@ public static boolean fastSetFromBigDecimal( // System.out.println("FAST_SET_FROM_BIG_DECIMAL adjusted for zeroes/scale " + bigDecimal + " scale " + bigDecimal.scale()); BigInteger unscaledValue = bigDecimal.unscaledValue(); + int precision = bigDecimal.precision(); // System.out.println("FAST_SET_FROM_BIG_DECIMAL unscaledValue " + unscaledValue + " length " + unscaledValue.toString().length()); final int scale = bigDecimal.scale(); @@ -881,7 +882,7 @@ public static boolean fastSetFromBigDecimal( return true; } // This method will scale down and round to fit, if necessary. 
- if (!fastSetFromBigInteger(unscaledValue, scale, fastResult)) { + if (!fastSetFromBigInteger(unscaledValue, scale, precision, fastResult)) { return false; } return true; @@ -1128,6 +1129,24 @@ public static boolean fastSetFromBigInteger( */ public static boolean fastSetFromBigInteger( BigInteger bigInteger, int scale, FastHiveDecimal fastResult) { + // Poor performance, because the precision will be calculated by bigInteger.toString() + return fastSetFromBigInteger(bigInteger, scale, -1, fastResult); + } + + /** + * Creates a fast decimal from a BigInteger with a specified scale. + * + * NOTE: The fastSetFromBigInteger method requires the caller to pass a fastResult + * parameter that has been reset for better performance. + * + * @param bigInteger the value to set as an integer + * @param scale the scale to use + * @param precision the precision to use + * @param fastResult an object to reuse + * @return True if the BigInteger and scale were successfully converted to a decimal. + */ + public static boolean fastSetFromBigInteger( + BigInteger bigInteger, int scale, int precision, FastHiveDecimal fastResult) { if (scale < 0) { @@ -1150,8 +1169,10 @@ public static boolean fastSetFromBigInteger( bigInteger = bigInteger.negate(); } - // A slow way to get the number of decimal digits. - int precision = bigInteger.toString().length(); + if (precision < 0) { + // A slow way to get the number of decimal digits.
+ precision = bigInteger.toString().length(); + } // System.out.println("FAST_SET_FROM_BIG_INTEGER adjusted bigInteger " + bigInteger + " precision " + precision); @@ -8802,8 +8823,14 @@ public static boolean fastDivide( BigDecimal divisor = fastBigDecimalValue( rightSignum, rightFast0, rightFast1, rightFast2, rightIntegerDigitCount, rightScale); + // the formula to evaluate the scale of divide operation is according to the original design: + // https://cwiki.apache.org/confluence/download/attachments/27362075/Hive_Decimal_Precision_Scale_Support.pdf + int scale = Math.max(6, leftScale + rightIntegerDigitCount + 1); + if (scale > HiveDecimal.MAX_SCALE) { + scale = HiveDecimal.MAX_SCALE; + } BigDecimal quotient = - denominator.divide(divisor, HiveDecimal.MAX_SCALE, BigDecimal.ROUND_HALF_UP); + denominator.divide(divisor, scale, BigDecimal.ROUND_HALF_UP); if (!fastSetFromBigDecimal( quotient, diff --git a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java index f8a36e5386..0ca19c51b7 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java +++ b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java @@ -731,7 +731,7 @@ public void testSingleWordDivision() { dec1 = HiveDecimal.create("1"); dec2 = HiveDecimal.create("3"); resultDec = dec1.divide(dec2); - Assert.assertEquals("0.33333333333333333333333333333333333333", resultDec.toString()); // UNDONE + Assert.assertEquals("0.333333", resultDec.toString()); // UNDONE //--------------------------------------------------- oldDec1 = HiveDecimalV1.create("1"); @@ -742,7 +742,7 @@ public void testSingleWordDivision() { dec1 = HiveDecimal.create("1"); dec2 = HiveDecimal.create("9"); resultDec = dec1.divide(dec2); - Assert.assertEquals("0.11111111111111111111111111111111111111", resultDec.toString()); // UNDONE + Assert.assertEquals("0.111111", resultDec.toString()); // 
UNDONE //--------------------------------------------------- oldDec1 = HiveDecimalV1.create("22"); @@ -753,7 +753,7 @@ public void testSingleWordDivision() { dec1 = HiveDecimal.create("22"); dec2 = HiveDecimal.create("7"); resultDec = dec1.divide(dec2); - Assert.assertEquals("3.1428571428571428571428571428571428571", resultDec.toString()); // UNDONE + Assert.assertEquals("3.142857", resultDec.toString()); // UNDONE //--------------------------------------------------- oldDec1 = HiveDecimalV1.create("1"); @@ -764,7 +764,7 @@ public void testSingleWordDivision() { dec1 = HiveDecimal.create("1"); dec2 = HiveDecimal.create("81"); resultDec = dec1.divide(dec2); - Assert.assertEquals("0.01234567901234567901234567901234567901", resultDec.toString()); // UNDONE + Assert.assertEquals("0.012346", resultDec.toString()); // UNDONE //--------------------------------------------------- oldDec1 = HiveDecimalV1.create("425");