diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java
index 929c405..3ce1fb3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java
@@ -81,7 +81,7 @@ public ColumnBuffer(Type type, BitSet nulls, Object values) {
     } else if (type == Type.BIGINT_TYPE) {
       longVars = (long[]) values;
       size = longVars.length;
-    } else if (type == Type.DOUBLE_TYPE) {
+    } else if (type == Type.DOUBLE_TYPE || type == Type.FLOAT_TYPE) {
       doubleVars = (double[]) values;
       size = doubleVars.length;
     } else if (type == Type.BINARY_TYPE) {
@@ -114,6 +114,9 @@ public ColumnBuffer(Type type) {
       longVars = new long[DEFAULT_SIZE];
       break;
     case FLOAT_TYPE:
+      type = Type.FLOAT_TYPE;
+      doubleVars = new double[DEFAULT_SIZE];
+      break;
     case DOUBLE_TYPE:
       type = Type.DOUBLE_TYPE;
       doubleVars = new double[DEFAULT_SIZE];
@@ -216,7 +219,7 @@ public ColumnBuffer extractSubset(int start, int end) {
       size = longVars.length;
       return subset;
     }
-    if (type == Type.DOUBLE_TYPE) {
+    if (type == Type.DOUBLE_TYPE || type == Type.FLOAT_TYPE) {
       ColumnBuffer subset = new ColumnBuffer(type, subNulls,
           Arrays.copyOfRange(doubleVars, start, end));
       doubleVars = Arrays.copyOfRange(doubleVars, end, size);
@@ -282,6 +285,7 @@ public Object get(int index) {
       return intVars[index];
     case BIGINT_TYPE:
       return longVars[index];
+    case FLOAT_TYPE:
     case DOUBLE_TYPE:
       return doubleVars[index];
     case STRING_TYPE:
@@ -320,6 +324,7 @@ public TColumn toTColumn() {
       value
       .setI64Val(new TI64Column(Longs.asList(Arrays.copyOfRange(longVars, 0, size)), nullMasks));
       break;
+    case FLOAT_TYPE:
     case DOUBLE_TYPE:
       value.setDoubleVal(new TDoubleColumn(Doubles.asList(Arrays.copyOfRange(doubleVars, 0, size)),
           nullMasks));
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 815ccfa..7449197 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -537,6 +537,66 @@ public void testJoinThriftSerializeInTasks() throws Exception {
     stmt.close();
   }
 
+  @Test
+  public void testEmptyResultsetThriftSerializeInTasks() throws Exception {
+    //stop HiveServer2
+    if (miniHS2.isStarted()) {
+      miniHS2.stop();
+    }
+
+    HiveConf conf = new HiveConf();
+    String userName;
+    setSerializeInTasksInConf(conf);
+    miniHS2 = new MiniHS2(conf);
+    Map<String, String> confOverlay = new HashMap<String, String>();
+    miniHS2.start(confOverlay);
+
+    userName = System.getProperty("user.name");
+    hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
+    Statement stmt = hs2Conn.createStatement();
+    stmt.execute("drop table if exists testThriftSerializeShow1");
+    stmt.execute("drop table if exists testThriftSerializeShow2");
+    stmt.execute("create table testThriftSerializeShow1 (a int)");
+    stmt.execute("create table testThriftSerializeShow2 (b int)");
+    stmt.execute("insert into testThriftSerializeShow1 values (1)");
+    stmt.execute("insert into testThriftSerializeShow2 values (2)");
+    ResultSet rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+    assertTrue(!rs.next());
+    stmt.execute("drop table testThriftSerializeShow1");
+    stmt.execute("drop table testThriftSerializeShow2");
+    stmt.close();
+  }
+
+  @Test
+  public void testFloatCast2DoubleThriftSerializeInTasks() throws Exception {
+    //stop HiveServer2
+    if (miniHS2.isStarted()) {
+      miniHS2.stop();
+    }
+
+    HiveConf conf = new HiveConf();
+    String userName;
+    setSerializeInTasksInConf(conf);
+    miniHS2 = new MiniHS2(conf);
+    Map<String, String> confOverlay = new HashMap<String, String>();
+    miniHS2.start(confOverlay);
+
+    userName = System.getProperty("user.name");
+    hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
+    Statement stmt = hs2Conn.createStatement();
+    stmt.execute("drop table if exists testThriftSerializeShow1");
+    stmt.execute("drop table if exists testThriftSerializeShow2");
+    stmt.execute("create table testThriftSerializeShow1 (a float)");
+    stmt.execute("create table testThriftSerializeShow2 (b double)");
+    stmt.execute("insert into testThriftSerializeShow1 values (1.1), (2.2), (3.3)");
+    stmt.execute("insert into testThriftSerializeShow2 values (2.2), (3.3), (4.4)");
+    ResultSet rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+    assertTrue(!rs.next());
+    stmt.execute("drop table testThriftSerializeShow1");
+    stmt.execute("drop table testThriftSerializeShow2");
+    stmt.close();
+  }
+
   /**
    * Tests the creation of the 3 scratch dirs: hdfs, local, downloaded resources (which is also local).
    * 1. Test with doAs=false: open a new JDBC session and verify the presence of directories/permissions
diff --git a/service/src/test/org/apache/hive/service/cli/TestColumn.java b/service/src/test/org/apache/hive/service/cli/TestColumn.java
index 9980aba..6589fc3 100644
--- a/service/src/test/org/apache/hive/service/cli/TestColumn.java
+++ b/service/src/test/org/apache/hive/service/cli/TestColumn.java
@@ -80,7 +80,7 @@ public void testFloatAndDoubleValues() {
     floatColumn.addValue(Type.FLOAT_TYPE, 2.033f);
 
-    // FLOAT_TYPE is treated as DOUBLE_TYPE
-    assertEquals(Type.DOUBLE_TYPE, floatColumn.getType());
+    // FLOAT_TYPE values are stored as doubles, but the column type is preserved
+    assertEquals(Type.FLOAT_TYPE, floatColumn.getType());
     assertEquals(2, floatColumn.size());
     assertEquals(1.1, floatColumn.get(0));
     assertEquals(2.033, floatColumn.get(1));
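The empty result set asserted in testFloatCast2DoubleThriftSerializeInTasks follows from float-to-double widening: a FLOAT value such as 2.2 widens to 2.200000047683716, which never equals the DOUBLE literal 2.2, so the join predicate matches no rows even though the inserted values look identical. A minimal, self-contained sketch of that arithmetic (plain Java, illustrative only and not part of the patch):

```java
// Illustrative only: why a FLOAT column value never equals the
// "same" DOUBLE literal once the comparison is done in double precision.
public class FloatDoubleWideningDemo {
  public static void main(String[] args) {
    float floatCol = 2.2f;   // value stored in the FLOAT column
    double doubleCol = 2.2;  // value stored in the DOUBLE column

    // Widening keeps the float's binary value rather than the decimal 2.2.
    double widened = (double) floatCol;

    System.out.println(widened);              // 2.200000047683716
    System.out.println(doubleCol);            // 2.2
    System.out.println(widened == doubleCol); // false -> the join emits no rows
  }
}
```

The ColumnBuffer change is consistent with this: FLOAT values are still buffered in the double array and shipped as a TDoubleColumn, but the column now reports FLOAT_TYPE instead of being silently relabeled as DOUBLE_TYPE.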