diff --git common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java index ad09015..a8007b1 100644 --- common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java +++ common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java @@ -52,6 +52,8 @@ public static final int ROUND_CEILING = BigDecimal.ROUND_CEILING; public static final int ROUND_HALF_UP = BigDecimal.ROUND_HALF_UP; + public static byte[] nullBytes = {0x0, 0x0, 0x0, 0x0}; + private BigDecimal bd = BigDecimal.ZERO; private HiveDecimal(BigDecimal bd) { diff --git data/files/decimal_10_0.txt data/files/decimal_10_0.txt new file mode 100644 index 0000000..7e1dd18 --- /dev/null +++ data/files/decimal_10_0.txt @@ -0,0 +1 @@ +9999999999.999 diff --git data/files/decimal_9_0.txt data/files/decimal_9_0.txt new file mode 100644 index 0000000..62010ba --- /dev/null +++ data/files/decimal_9_0.txt @@ -0,0 +1 @@ +999999999.999 diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDecimal.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDecimal.java new file mode 100644 index 0000000..96f4564 --- /dev/null +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDecimal.java @@ -0,0 +1,139 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql; + +import junit.framework.Assert; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +public class TestDecimal { + protected static HiveConf conf; + protected static Driver driver; + protected static String dataFileDir; + protected static Path dec9_0FilePath; + protected static Path dec10_0FilePath; + protected static FileSystem fs; + + protected static Path warehouseDir; + protected static Path baseDfsDir; + + private Logger log = Logger.getLogger(TestDecimal.class); + private static String DB = "testdecimal"; + + @BeforeClass + public static void setup() throws Exception { + conf = new HiveConf(TestDecimal.class); + MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null); + fs = dfs.getFileSystem(); + baseDfsDir = new Path(new Path(fs.getUri()), "/base"); + fs.mkdirs(baseDfsDir); + warehouseDir = new Path(baseDfsDir, "warehouse"); + fs.mkdirs(warehouseDir); + conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString()); + + dataFileDir = conf.get("test.data.files").replace('\\', '/') + .replace("c:", ""); + dec9_0FilePath = new Path(dataFileDir, "decimal_9_0.txt"); + dec10_0FilePath = new Path(dataFileDir, "decimal_10_0.txt"); + + //set hive conf vars + 
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + int port = MetaStoreUtils.findFreePort(); + MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge()); + + SessionState.start(new CliSessionState(conf)); + driver = new Driver(conf); + + CommandProcessorResponse ret = driver.run("CREATE DATABASE " + DB); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("USE " + DB); + Assert.assertEquals(0, ret.getResponseCode()); + } + + @AfterClass + public static void cleanUp() throws Exception { + driver.run("drop database if exists " + DB + " cascade"); + } + + @Test + public void testDefaultDecimal93() throws Exception { + String table = "defaultDecimal93"; + + CommandProcessorResponse ret = driver.run("CREATE table " + table + " (dec decimal)"); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("LOAD DATA LOCAL INPATH '" + dec9_0FilePath + "' INTO table " + table); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("select dec from " + table); + Assert.assertEquals(0, ret.getResponseCode()); + + List resList = new ArrayList(); + Assert.assertEquals(true, driver.getResults(resList)); + for (String str : resList) { + log.info("reslist : " + str); + } + + Assert.assertEquals(1000000000, Integer.parseInt(resList.get(resList.size() - 1))); + } + + @Test + public void testNullDecimal() throws Exception { + String testDb = "decdb"; + String table = "nullDecimal"; + CommandProcessorResponse ret = driver.run("CREATE DATABASE " + testDb); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("USE " + testDb); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("CREATE table " + table + " (dec decimal)"); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("LOAD DATA LOCAL INPATH '" + dec10_0FilePath + "' INTO table " + table); + Assert.assertEquals(0, ret.getResponseCode()); + + ret = driver.run("select dec from " + table); + Assert.assertEquals(0, 
ret.getResponseCode()); + + List resList = new ArrayList(); + Assert.assertEquals(true, driver.getResults(resList)); + for (String str : resList) { + log.info("reslist : " + str); + } + + Assert.assertEquals("NULL", resList.get(resList.size() - 1)); + } +} diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java index 78cc381..c252e6f 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.hive.serde2.lazy; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; import org.apache.commons.logging.Log; @@ -93,4 +96,12 @@ public HiveDecimalWritable getWritableObject() { return data; } + public static void writeUTF8(OutputStream out, HiveDecimal i) throws IOException { + if (i == null) { + out.write(HiveDecimal.nullBytes); + } else { + ByteBuffer b = Text.encode(i.toString()); + out.write(b.array(), 0, b.limit()); + } + } } diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java index 5a46237..1d62422 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java @@ -28,7 +28,6 @@ import java.util.Properties; import org.apache.commons.codec.binary.Base64; -import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; @@ -261,9 +260,8 @@ public static void writePrimitiveUTF8(OutputStream out, Object o, break; } case DECIMAL: { - HiveDecimal bd = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o); - ByteBuffer b = 
Text.encode(bd.toString()); - out.write(b.array(), 0, b.limit()); + LazyHiveDecimal.writeUTF8(out, + ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o)); break; } default: {