diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 0249566..e8e57ef 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -512,6 +512,11 @@ private void setSerializeInTasksInConf(HiveConf conf) {
     conf.setInt("hive.server2.thrift.resultset.max.fetch.size", 1000);
   }
 
+  private void unsetSerializeInTasksInConf(HiveConf conf) {
+    conf.setBoolean("hive.server2.thrift.resultset.serialize.in.tasks", false);
+    conf.unset("hive.server2.thrift.resultset.max.fetch.size");
+  }
+
   @Test
   public void testMetadataQueriesWithSerializeThriftInTasks() throws Exception {
     //stop HiveServer2
@@ -674,6 +679,45 @@ public void testFloatCast2DoubleThriftSerializeInTasks() throws Exception {
     stmt.close();
   }
 
+  @Test
+  public void testEnableThriftSerializeInTasks() throws Exception {
+    //stop HiveServer2
+    if (miniHS2.isStarted()) {
+      miniHS2.stop();
+    }
+
+    HiveConf conf = new HiveConf();
+    String userName;
+    setSerializeInTasksInConf(conf);
+    miniHS2 = new MiniHS2(conf);
+    Map<String, String> confOverlay = new HashMap<String, String>();
+    miniHS2.start(confOverlay);
+
+    userName = System.getProperty("user.name");
+    hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
+    Statement stmt = hs2Conn.createStatement();
+    stmt.execute("drop table if exists testThriftSerializeShow1");
+    stmt.execute("drop table if exists testThriftSerializeShow2");
+    stmt.execute("create table testThriftSerializeShow1 (a int)");
+    stmt.execute("create table testThriftSerializeShow2 (b int)");
+    stmt.execute("insert into testThriftSerializeShow1 values (1)");
+    stmt.execute("insert into testThriftSerializeShow2 values (2)");
+    ResultSet rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+    assertTrue(!rs.next());
+
+    unsetSerializeInTasksInConf(conf);
+    rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+    assertTrue(!rs.next());
+
+    setSerializeInTasksInConf(conf);
+    rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+    assertTrue(!rs.next());
+
+    stmt.execute("drop table testThriftSerializeShow1");
+    stmt.execute("drop table testThriftSerializeShow2");
+    stmt.close();
+  }
+
   /**
    * Tests the creation of the 3 scratch dirs: hdfs, local, downloaded resources (which is also local).
    * 1. Test with doAs=false: open a new JDBC session and verify the presence of directories/permissions
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 66589fe..2fbd82c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -6871,8 +6871,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input)
     if (tblDesc == null) {
       if (qb.getIsQuery()) {
         String fileFormat;
-        if (SessionState.get().isHiveServerQuery() &&
-            conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS)) {
+        if (SessionState.get().getIsUsingThriftJDBCBinarySerDe()) {
           fileFormat = "SequenceFile";
           HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT, fileFormat);
           table_desc=
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 114fa2f..af55477 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -155,7 +155,7 @@ public void compile(final ParseContext pCtx, final List sessionConfMap) throws HiveSQLException {
     sessionState.setUserIpAddress(ipAddress);
     sessionState.setIsHiveServerQuery(true);
     sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
+    sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());
     SessionState.start(sessionState);
     try {
       sessionState.loadAuxJars();
@@ -189,7 +190,7 @@ public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
     lastIdleTime = lastAccessTime;
   }
 
-  /**
+/**
    * It is used for processing hiverc file from HiveServer2 side.
    */
   private class GlobalHivercFileProcessor extends HiveFileProcessor {
@@ -268,6 +269,11 @@ private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
     }
   }
 
+  private boolean updateIsUsingThriftJDBCBinarySerDe() {
+    return (8 <= getProtocolVersion().getValue())
+        && sessionConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS);
+  }
+
   @Override
   public void setOperationLogSessionDir(File operationLogRootDir) {
     if (!operationLogRootDir.exists()) {
@@ -355,6 +361,7 @@ private synchronized void acquireAfterOpLock(boolean userAccess) {
     // stored in the thread local for the handler thread.
     SessionState.setCurrentSessionState(sessionState);
     sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
+    sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());
     if (userAccess) {
       lastAccessTime = System.currentTimeMillis();
     }