diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 02d4360091..010b556ece 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -81,6 +81,8 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.junit.rules.TestName; +import static java.sql.ResultSet.CONCUR_READ_ONLY; +import static java.sql.ResultSet.TYPE_SCROLL_INSENSITIVE; import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME; import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME; import static org.junit.Assert.assertEquals; @@ -3210,6 +3212,32 @@ public void testGetQueryId() throws Exception { stmt1.close(); } + @Test + public void testResultNextAcidTable() throws Exception { + Statement stmt = con.createStatement(TYPE_SCROLL_INSENSITIVE, CONCUR_READ_ONLY); + stmt.execute("set " + ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=true"); + stmt.execute("set " + ConfVars.HIVE_TXN_MANAGER.varname + + "=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); + stmt.execute("create table tbl (fld int) tblproperties(" + + "'transactional'='true','transactional_properties'='insert_only')"); + stmt.execute("insert into tbl values (1)"); + stmt.execute("insert into tbl values (2)"); + stmt.execute("insert into tbl values (3)"); + stmt.setMaxRows(1); + stmt.setFetchSize(1); + ResultSet res = stmt.executeQuery("select * from tbl"); + boolean moreRow = res.next(); + while (moreRow) { + moreRow = res.next(); + } + res.beforeFirst(); + moreRow = res.next(); + while (moreRow) { + moreRow = res.next(); + } + stmt.close(); + } + // Test that opening a JDBC connection to a non-existent database throws a HiveSQLException @Test(expected = HiveSQLException.class) public void testConnectInvalidDatabase() throws SQLException { diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index caa9d83b4f..93b115841b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -51,6 +51,7 @@ private ListSinkOperator sink; private int totalRows; private static transient final Logger LOG = LoggerFactory.getLogger(FetchTask.class); + JobConf job = null; public FetchTask() { super(); @@ -68,7 +69,11 @@ public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext try { // Create a file system handle - JobConf job = new JobConf(conf); + if (job == null) { + // The job config should be initialized once per fetch task. In case of refetch, we should use the + // same config. + job = new JobConf(conf); + } Operator source = work.getSource(); if (source instanceof TableScanOperator) {