Index: jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
===================================================================
--- jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (revision 1081785)
+++ jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (working copy)
@@ -192,21 +192,25 @@
   }
 
   public final void testSelectAll() throws Exception {
-    doTestSelectAll(tableName, -1); // tests not setting maxRows (return all)
-    doTestSelectAll(tableName, 0); // tests setting maxRows to 0 (return all)
+    doTestSelectAll(tableName, -1, -1); // tests not setting maxRows (return all)
+    doTestSelectAll(tableName, 0, -1); // tests setting maxRows to 0 (return all)
   }
 
   public final void testSelectAllPartioned() throws Exception {
-    doTestSelectAll(partitionedTableName, -1); // tests not setting maxRows
+    doTestSelectAll(partitionedTableName, -1, -1); // tests not setting maxRows
     // (return all)
-    doTestSelectAll(partitionedTableName, 0); // tests setting maxRows to 0
+    doTestSelectAll(partitionedTableName, 0, -1); // tests setting maxRows to 0
     // (return all)
   }
 
   public final void testSelectAllMaxRows() throws Exception {
-    doTestSelectAll(tableName, 100);
+    doTestSelectAll(tableName, 100, -1);
   }
 
+  public final void testSelectAllFetchSize() throws Exception {
+    doTestSelectAll(tableName, 100, 20);
+  }
+
   public void testDataTypes() throws Exception {
     Statement stmt = con.createStatement();
@@ -267,11 +271,15 @@
     assertFalse(res.next());
   }
 
-  private void doTestSelectAll(String tableName, int maxRows) throws Exception {
+  private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throws Exception {
     Statement stmt = con.createStatement();
     if (maxRows >= 0) {
       stmt.setMaxRows(maxRows);
     }
+    if (fetchSize > 0) {
+      stmt.setFetchSize(fetchSize);
+      assertEquals(fetchSize, stmt.getFetchSize());
+    }
 
     // JDBC says that 0 means return all, which is the default
     int expectedMaxRows = maxRows < 1 ? 0 : maxRows;
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java (revision 1081785)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java (working copy)
@@ -33,6 +33,8 @@
 public class HiveStatement implements java.sql.Statement {
   private JdbcSessionState session;
   private HiveInterface client;
+  private int fetchSize = 50;
+
   /**
    * We need to keep a reference to the result set to support the following:
    *
@@ -191,6 +193,7 @@
       throw new SQLException(ex.toString(), "08S01");
     }
     resultSet = new HiveQueryResultSet(client, maxRows);
+    resultSet.setFetchSize(fetchSize);
     return resultSet;
   }
 
@@ -266,7 +269,7 @@
    */
 
   public int getFetchSize() throws SQLException {
-    throw new SQLException("Method not supported");
+    return fetchSize;
   }
 
   /*
@@ -446,7 +449,7 @@
    */
 
   public void setFetchSize(int rows) throws SQLException {
-    throw new SQLException("Method not supported");
+    fetchSize = rows;
   }
 
   /*
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java (revision 1081785)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java (working copy)
@@ -54,7 +54,10 @@
 
   private int maxRows = 0;
   private int rowsFetched = 0;
+  private int fetchSize = 50;
 
+  private List fetchedRows;
+
   public HiveQueryResultSet(HiveInterface client, int maxRows) throws SQLException {
     this.client = client;
     this.maxRows = maxRows;
@@ -96,18 +99,18 @@
       serde = new LazySimpleSerDe();
       Properties props = new Properties();
       if (names.length() > 0) {
-        LOG.info("Column names: " + names);
+        LOG.debug("Column names: " + names);
         props.setProperty(Constants.LIST_COLUMNS, names);
       }
       if (types.length() > 0) {
-        LOG.info("Column types: " + types);
+        LOG.debug("Column types: " + types);
         props.setProperty(Constants.LIST_COLUMN_TYPES, types);
       }
       serde.initialize(new Configuration(), props);
     } catch (Exception ex) {
       ex.printStackTrace();
-      throw new SQLException("Could not create ResultSet: " + ex.getMessage());
+      throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
     }
   }
 
@@ -128,9 +131,19 @@
       return false;
     }
 
-    String rowStr = "";
     try {
-      rowStr = (String) client.fetchOne();
+      if (fetchedRows == null || fetchedRows.size()==0) {
+        fetchedRows = client.fetchN(fetchSize);
+      }
+
+      String rowStr = "";
+      if (fetchedRows.size()>0) {
+        rowStr = fetchedRows.get(0);
+        fetchedRows.remove(0);
+      } else {
+        return false;
+      }
+
       rowsFetched++;
       if (LOG.isDebugEnabled()) {
         LOG.debug("Fetched row string: " + rowStr);
@@ -165,6 +178,16 @@
     return true;
   }
 
+  @Override
+  public void setFetchSize(int rows) throws SQLException {
+    fetchSize = rows;
+  }
+
+  @Override
+  public int getFetchSize() throws SQLException {
+    return fetchSize;
+  }
+
   /**
    * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
   * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
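
Usage sketch (not part of the patch): a minimal client-side illustration of how the new fetch-size plumbing would be exercised through the standard JDBC API once setFetchSize() stops throwing. The connection URL, credentials, and table name are placeholders, and the example assumes a running HiveServer reachable from the client.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class FetchSizeExample {
  public static void main(String[] args) throws Exception {
    // Load the Hive JDBC driver that lives in this module.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    // Placeholder URL and credentials; adjust to the local HiveServer instance.
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    // With the patch applied, the hint is stored on the HiveStatement and copied
    // onto the HiveQueryResultSet, which then pulls rows from the server in
    // batches of 20 via fetchN() instead of one fetchOne() call per row.
    stmt.setFetchSize(20);
    ResultSet res = stmt.executeQuery("SELECT * FROM my_table"); // placeholder table
    while (res.next()) {
      System.out.println(res.getString(1));
    }
    res.close();
    stmt.close();
    con.close();
  }
}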