Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java	(revision 796955)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java	Thu Jul 23 10:52:08 PDT 2009
@@ -14,6 +14,30 @@
   JdbcSessionState session;
   HiveInterface client;
   /**
+   * We need to keep a reference to the result set to support the following:
+   *
+   *   statement.execute(String sql);
+   *   statement.getResultSet();
+   *
+   */
+  ResultSet resultSet = null;
+
+  /**
+   * The maximum number of rows this statement should return (0 => all rows)
+   */
+  int maxRows = 0;
+
+  /**
+   * Add SQLWarnings to the warningChain if needed
+   */
+  SQLWarning warningChain = null;
+
+  /**
+   * Keep state so we can fail certain calls made after close();
+   */
+  boolean isClosed = false;
+
+  /**
    *
    */
   public HiveStatement(JdbcSessionState session, HiveInterface client) {
@@ -53,8 +77,7 @@
    */
   public void clearWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.warningChain = null;
   }
 
   /* (non-Javadoc)
@@ -62,8 +85,10 @@
    */
   public void close() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //TODO: how to properly shut down the client?
+    client = null;
+    resultSet = null;
+    isClosed = true;
   }
 
   /* (non-Javadoc)
@@ -71,7 +96,11 @@
    */
   public boolean execute(String sql) throws SQLException {
-    return true;
+    ResultSet rs = executeQuery(sql);
+
+    //TODO: this should really check if there are results, but there's no easy
+    //way to do that without calling rs.next();
+    return rs != null;
   }
 
   /* (non-Javadoc)
@@ -117,12 +146,17 @@
    */
   public ResultSet executeQuery(String sql) throws SQLException {
+    if (this.isClosed)
+      throw new SQLException("Can't execute after statement has been closed");
+
     try {
+      this.resultSet = null;
       client.execute(sql);
     } catch (Exception ex) {
       throw new SQLException(ex.toString());
     }
-    return new HiveResultSet(client);
+    this.resultSet = new HiveResultSet(client, maxRows);
+    return this.resultSet;
   }
 
   /* (non-Javadoc)
@@ -218,8 +252,7 @@
    */
   public int getMaxRows() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.maxRows;
   }
 
   /* (non-Javadoc)
@@ -254,8 +287,7 @@
    */
   public ResultSet getResultSet() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.resultSet;
   }
 
   /* (non-Javadoc)
@@ -290,8 +322,7 @@
    */
   public int getUpdateCount() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return 0;
   }
 
   /* (non-Javadoc)
@@ -299,8 +330,7 @@
    */
   public SQLWarning getWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.warningChain;
   }
 
   /* (non-Javadoc)
@@ -308,8 +338,7 @@
    */
   public boolean isClosed() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.isClosed;
   }
 
   /* (non-Javadoc)
@@ -371,8 +400,8 @@
    */
   public void setMaxRows(int max) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (max < 0) throw new SQLException("max must be >= 0");
+    this.maxRows = max;
   }
 
   /* (non-Javadoc)
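A minimal usage sketch of the Statement behavior introduced above (setMaxRows, execute/getResultSet, and the closed-statement check); the connection URL and table name are placeholders, not part of the patch:

    // Sketch only: assumes a Hive server at localhost:10000 and a table named "src".
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    stmt.setMaxRows(100);                 // 0 (the default) means "return all rows"
    boolean hasResult = stmt.execute("select * from src");
    ResultSet rs = stmt.getResultSet();   // the same ResultSet the statement cached in execute()
    while (rs.next()) {
      System.out.println(rs.getString(1));
    }
    stmt.close();                         // further executeQuery() calls now throw SQLException
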
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java	(revision 796955)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java	Thu Jul 23 10:52:08 PDT 2009
@@ -201,8 +201,7 @@
    */
   public String getDatabaseProductName() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "Hive";
   }
 
   /* (non-Javadoc)
@@ -210,8 +209,7 @@
    */
   public String getDatabaseProductVersion() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "0";
   }
 
   /* (non-Javadoc)
@@ -246,7 +244,7 @@
    */
   public String getDriverName() throws SQLException {
-    return new String("hive");
+    return "hive";
   }
 
   /* (non-Javadoc)
@@ -254,7 +252,7 @@
    */
   public String getDriverVersion() throws SQLException {
-    return new String("0");
+    return "0";
   }
 
   /* (non-Javadoc)
@@ -566,8 +564,8 @@
   public ResultSet getProcedures(String catalog, String schemaPattern,
       String procedureNamePattern) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //TODO: return empty result set here
+    return null;
   }
 
   /* (non-Javadoc)
@@ -1050,8 +1048,7 @@
    */
   public boolean supportsCatalogsInTableDefinitions() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1260,8 +1257,7 @@
    */
   public boolean supportsMultipleResultSets() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1404,8 +1400,7 @@
    */
   public boolean supportsSchemasInDataManipulation() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1440,8 +1435,7 @@
    */
   public boolean supportsSchemasInTableDefinitions() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1476,8 +1470,7 @@
    */
   public boolean supportsStoredProcedures() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
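A quick sketch of the capability probes these metadata changes enable, assuming an open Connection named con:

    DatabaseMetaData meta = con.getMetaData();
    System.out.println(meta.getDatabaseProductName());      // "Hive"
    System.out.println(meta.getDatabaseProductVersion());   // "0"
    System.out.println(meta.supportsStoredProcedures());    // false
    System.out.println(meta.supportsMultipleResultSets());  // false
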
SQLException("Method not supported"); + try { + if (transport != null) transport.close(); - } + } + finally { + isClosed = true; + } + } /* (non-Javadoc) * @see java.sql.Connection#commit() @@ -158,6 +165,7 @@ */ public Statement createStatement() throws SQLException { + if (isClosed) throw new SQLException("Can't create Statement, connection is closed"); return new HiveStatement(session, client); } @@ -268,8 +276,7 @@ */ public SQLWarning getWarnings() throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + return this.warningChain; } /* (non-Javadoc) @@ -277,8 +284,7 @@ */ public boolean isClosed() throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + return isClosed; } /* (non-Javadoc) Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java (revision 796955) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java Thu Jul 23 11:07:46 PDT 2009 @@ -43,20 +43,31 @@ List columnNames; List columnTypes; + SQLWarning warningChain = null; + boolean wasNull = false; + int maxRows = 0; + int rowsFetched = 0; + /** * */ @SuppressWarnings("unchecked") - public HiveResultSet(HiveInterface client) { + public HiveResultSet(HiveInterface client, int maxRows) throws SQLException { this.client = client; this.row = new ArrayList(); + this.maxRows = maxRows; initDynamicSerde(); } + @SuppressWarnings("unchecked") + public HiveResultSet(HiveInterface client) throws SQLException { + this(client, 0); + } + /** * Instantiate the dynamic serde used to deserialize the result row */ - public void initDynamicSerde() { + public void initDynamicSerde() throws SQLException { try { Schema fullSchema = client.getSchema(); List schema = fullSchema.getFieldSchemas(); @@ -70,6 +81,8 @@ for (int pos = 0; pos < schema.size(); pos++) { if (pos != 0) serDDL = serDDL.concat(","); + columnTypes.add(schema.get(pos).getType()); + columnNames.add(schema.get(pos).getName()); serDDL = serDDL.concat(schema.get(pos).getType()); serDDL = serDDL.concat(" "); serDDL = serDDL.concat(schema.get(pos).getName()); @@ -89,8 +102,7 @@ ds.initialize(new Configuration(), dsp); } catch (Exception ex) { ex.printStackTrace(); - System.exit(1); - // TODO: Decide what to do here. 
+ throw new SQLException("Could not create ResultSet: " + ex.getMessage()); } } @@ -135,8 +147,7 @@ */ public void clearWarnings() throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + warningChain = null; } /* (non-Javadoc) @@ -290,7 +301,7 @@ */ public boolean getBoolean(int columnIndex) throws SQLException { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).intValue() != 0; } @@ -311,7 +322,7 @@ */ public byte getByte(int columnIndex) throws SQLException { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).byteValue(); } @@ -404,8 +415,11 @@ */ public Date getDate(int columnIndex) throws SQLException { + Object obj = getObject(columnIndex); + if (obj == null) return null; + try { - return Date.valueOf((String)row.get(columnIndex-1)); + return Date.valueOf((String)obj); } catch (Exception e) { throw new SQLException("Cannot convert column " + columnIndex + " to date: " + e.toString()); @@ -445,7 +459,7 @@ public double getDouble(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).doubleValue(); } @@ -489,7 +503,7 @@ public float getFloat(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).floatValue(); } @@ -523,7 +537,7 @@ public int getInt(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).intValue(); } @@ -549,7 +563,7 @@ public long getLong(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).longValue(); } @@ -636,7 +650,18 @@ */ public Object getObject(int columnIndex) throws SQLException { + if (row == null) { + throw new SQLException("No row found."); + } + + if (columnIndex > row.size()) { + throw new SQLException("Invalid columnIndex: " + columnIndex); + } + try { + this.wasNull = false; + if (row.get(columnIndex-1) == null) this.wasNull = true; + return row.get(columnIndex-1); } catch (Exception e) { @@ -742,7 +767,7 @@ public short getShort(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).shortValue(); } @@ -778,7 +803,10 @@ public String getString(int columnIndex) throws SQLException { // Column index starts from 1, not 0. 
@@ -636,7 +650,18 @@
    */
   public Object getObject(int columnIndex) throws SQLException {
+    if (row == null) {
+      throw new SQLException("No row found.");
+    }
+
+    if (columnIndex > row.size()) {
+      throw new SQLException("Invalid columnIndex: " + columnIndex);
+    }
+
     try {
+      this.wasNull = false;
+      if (row.get(columnIndex-1) == null) this.wasNull = true;
+
       return row.get(columnIndex-1);
     } catch (Exception e) {
@@ -742,7 +767,7 @@
   public short getShort(int columnIndex) throws SQLException {
     try {
-      Object obj = row.get(columnIndex-1);
+      Object obj = getObject(columnIndex);
       if (Number.class.isInstance(obj)) {
         return ((Number)obj).shortValue();
       }
@@ -778,7 +803,10 @@
   public String getString(int columnIndex) throws SQLException {
     // Column index starts from 1, not 0.
-    return row.get(columnIndex - 1).toString();
+    Object obj = getObject(columnIndex);
+    if (obj == null) return null;
+
+    return obj.toString();
   }
 
   /* (non-Javadoc)
@@ -914,8 +942,7 @@
    */
   public SQLWarning getWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return warningChain;
   }
 
   /* (non-Javadoc)
@@ -1007,9 +1034,12 @@
    */
   public boolean next() throws SQLException {
+    if (this.maxRows > 0 && this.rowsFetched >= this.maxRows) return false;
+
     String row_str = "";
     try {
       row_str = (String)client.fetchOne();
+      this.rowsFetched++;
       if (!row_str.equals("")) {
         Object o = ds.deserialize(new BytesWritable(row_str.getBytes()));
         row = (ArrayList)o;
@@ -1888,8 +1918,7 @@
    */
   public boolean wasNull() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.wasNull;
   }
 
   /* (non-Javadoc)
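A small sketch of the ResultSet changes above (null handling through getObject, wasNull, and the maxRows cap enforced in next()); the table and column names are illustrative:

    // Sketch only: "src" and its columns are illustrative.
    Statement stmt = con.createStatement();
    stmt.setMaxRows(10);                  // HiveResultSet.next() returns false after 10 rows
    ResultSet rs = stmt.executeQuery("select key, value from src");
    while (rs.next()) {
      String value = rs.getString(2);     // returns null instead of throwing when the column is null
      if (rs.wasNull()) {
        System.out.println("row had a null value column");
      } else {
        System.out.println(value);
      }
    }
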
Index: jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
===================================================================
--- jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java	(revision 796955)
+++ jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java	Thu Jul 23 12:10:12 PDT 2009
@@ -2,18 +2,15 @@
 import java.sql.SQLException;
 import java.sql.Connection;
-import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.sql.Driver;
 import java.sql.DriverManager;
-import java.util.Properties;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverPropertyInfo;
+import java.sql.ResultSetMetaData;
+import java.sql.Types;
 
 import junit.framework.TestCase;
-import javax.naming.*;
-import javax.naming.directory.*;
-import javax.sql.DataSource;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 
 public class TestJdbcDriver extends TestCase {
@@ -44,6 +41,7 @@
       con = DriverManager.getConnection("jdbc:hive://", "", "");
     }
     assertNotNull("Connection is null", con);
+    assertFalse("Connection should not be closed", con.isClosed());
 
     Statement stmt = con.createStatement();
     assertNotNull("Statement is null", stmt);
@@ -92,33 +90,53 @@
     res = stmt.executeQuery("drop table " + partitionedTableName);
     assertFalse(res.next());
 
+    con.close();
+    assertTrue("Connection should be closed", con.isClosed());
+
+    Exception expectedException = null;
+    try {
+      con.createStatement();
-  }
+    }
+    catch(Exception e) {
+      expectedException = e;
+    }
-
+
+    assertNotNull("createStatement() on closed connection should throw exception",
+        expectedException);
+  }
+
   public final void testSelectAll() throws Exception {
+    doTestSelectAll(this.tableName, -1); // tests not setting maxRows (return all)
+    doTestSelectAll(this.tableName, 0);  // tests setting maxRows to 0 (return all)
+  }
+
+  public final void testSelectAllPartitioned() throws Exception {
+    doTestSelectAll(this.partitionedTableName, -1); // tests not setting maxRows (return all)
+    doTestSelectAll(this.partitionedTableName, 0);  // tests setting maxRows to 0 (return all)
+  }
+
+  public final void testSelectAllMaxRows() throws Exception {
+    doTestSelectAll(this.tableName, 100);
+  }
+
+  private final void doTestSelectAll(String tableName, int maxRows) throws Exception {
     Statement stmt = con.createStatement();
+    if (maxRows >= 0) stmt.setMaxRows(maxRows);
+
+    //JDBC says that 0 means return all, which is the default
+    int expectedMaxRows = maxRows < 1 ? 0 : maxRows;
+
     assertNotNull("Statement is null", stmt);
+    assertEquals("Statement max rows not as expected", expectedMaxRows, stmt.getMaxRows());
+    assertFalse("Statement should not be closed", stmt.isClosed());
 
     ResultSet res;
-    // TODO: There is no schema for show tables or describe table.
-    /*
-    stmt.executeQuery("drop table michi1");
-    stmt.executeQuery("drop table michi2");
-    stmt.executeQuery("drop table michi3");
-    stmt.executeQuery("create table michi1 (num int)");
-    stmt.executeQuery("create table michi2 (num int)");
-    stmt.executeQuery("create table michi3 (num int)");
-
-    res = stmt.executeQuery("show tables");
-    res = stmt.executeQuery("describe michi1");
-    while (res.next()) {
-      System.out.println(res.getString(0));
-    }
-    */
-
     // run some queries
     res = stmt.executeQuery("select * from " + tableName);
     assertNotNull("ResultSet is null", res);
+    assertTrue("getResultSet() not returning expected ResultSet", res == stmt.getResultSet());
+    assertEquals("get update count not as expected", 0, stmt.getUpdateCount());
 
     int i = 0;
 
     boolean moreRow = res.next();
@@ -128,6 +146,10 @@
         res.getInt(1);
         res.getString(1);
         res.getString(2);
+        assertFalse("Last result value was not null", res.wasNull());
+        assertNull("No warnings should be found on ResultSet", res.getWarnings());
+        res.clearWarnings(); //verifying that method is supported
+
         //System.out.println(res.getString(1) + " " + res.getString(2));
         assertEquals("getInt and getString don't align for the same result value",
             String.valueOf(res.getInt(1)), res.getString(1));
@@ -141,49 +163,24 @@
         throw new Exception(e.toString());
       }
     }
-    // supposed to get 500 rows
-    assertEquals(500, i);
+    // supposed to get 500 rows if maxRows isn't set
+    int expectedRowCount = maxRows > 0 ? maxRows : 500;
+    assertEquals("Incorrect number of rows returned", expectedRowCount, i);
+
     // should have no more rows
     assertEquals(false, moreRow);
-  }
-  public final void testSelectAllPartitioned() throws Exception {
-    Statement stmt = con.createStatement();
-    assertNotNull("Statement is null", stmt);
+    assertNull("No warnings should be found on statement", stmt.getWarnings());
+    stmt.clearWarnings(); //verifying that method is supported
-    // run some queries
-    ResultSet res = stmt.executeQuery("select * from " + partitionedTableName);
-    assertNotNull("ResultSet is null", res);
-    int i = 0;
+    assertNull("No warnings should be found on connection", con.getWarnings());
+    con.clearWarnings(); //verifying that method is supported
-    boolean moreRow = res.next();
-    while (moreRow) {
-      try {
-        i++;
-        res.getInt(1);
-        res.getString(1);
-        res.getString(2);
-        //System.out.println(res.getString(1) + " " + res.getString(2));
-        assertEquals("getInt and getString don't align for the same result value",
-            String.valueOf(res.getInt(1)), res.getString(1));
-        assertEquals("Unexpected result found",
-            "val_" + res.getString(1), res.getString(2));
-        moreRow = res.next();
+    stmt.close();
+    assertTrue("Statement should be closed", stmt.isClosed());
-      }
+  }
-      catch (SQLException e) {
-        System.out.println(e.toString());
-        e.printStackTrace();
-        throw new Exception(e.toString());
-      }
-    }
-    // supposed to get 500 rows
-    assertEquals(500, i);
-    // should have no more rows
-    assertEquals(false, moreRow);
-  }
-
   public void testShowTables() throws SQLException {
     Statement stmt = con.createStatement();
     assertNotNull("Statement is null", stmt);
@@ -219,4 +216,80 @@
   }
 
+  public void testDatabaseMetaData() throws SQLException {
+    DatabaseMetaData meta = con.getMetaData();
+
+    assertEquals("Hive", meta.getDatabaseProductName());
+    assertEquals("0", meta.getDatabaseProductVersion());
+    assertNull(meta.getProcedures(null, null, null));
+    assertFalse(meta.supportsCatalogsInTableDefinitions());
+    assertFalse(meta.supportsSchemasInTableDefinitions());
+    assertFalse(meta.supportsSchemasInDataManipulation());
+    assertFalse(meta.supportsMultipleResultSets());
+    assertFalse(meta.supportsStoredProcedures());
-}
+  }
+
+  public void testResultSetMetaData() throws SQLException {
+    Statement stmt = con.createStatement();
+    ResultSet res = stmt.executeQuery("drop table " + tableName);
+
+    //creating a table with tinyint is failing currently so not including
+    res = stmt.executeQuery("create table " + tableName
+        + " (a string, b boolean, c bigint, d int, f double)");
+    res = stmt.executeQuery("select * from " + tableName + " limit 1");
+
+    ResultSetMetaData meta = res.getMetaData();
+    assertEquals("Unexpected column type", Types.VARCHAR, meta.getColumnType(1));
+    assertEquals("Unexpected column type", Types.BOOLEAN, meta.getColumnType(2));
+    assertEquals("Unexpected column type", Types.BIGINT, meta.getColumnType(3));
+    assertEquals("Unexpected column type", Types.INTEGER, meta.getColumnType(4));
+    assertEquals("Unexpected column type", Types.DOUBLE, meta.getColumnType(5));
+    assertEquals("Unexpected column type name", "string", meta.getColumnTypeName(1));
+    assertEquals("Unexpected column type name", "boolean", meta.getColumnTypeName(2));
+    assertEquals("Unexpected column type name", "bigint", meta.getColumnTypeName(3));
+    assertEquals("Unexpected column type name", "int", meta.getColumnTypeName(4));
+    assertEquals("Unexpected column type name", "double", meta.getColumnTypeName(5));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(1));
+    assertEquals("Unexpected column display size", 8, meta.getColumnDisplaySize(2));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(3));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(4));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(5));
+
+    for (int i = 1; i <= 5; i++) {
+      assertFalse(meta.isAutoIncrement(i));
+      assertFalse(meta.isCurrency(i));
+      assertEquals(ResultSetMetaData.columnNullable, meta.isNullable(i));
+
+      int expectedPrecision = i == 5 ? -1 : 0;
+      int expectedScale = i == 5 ? -1 : 0;
+      assertEquals("Unexpected precision", expectedPrecision, meta.getPrecision(i));
+      assertEquals("Unexpected scale", expectedScale, meta.getScale(i));
+    }
+  }
+
+  // [url] [host] [port] [db]
+  private static final String[][] URL_PROPERTIES = new String[][] {
+    {"jdbc:hive://", "", "", "default"},
+    {"jdbc:hive://localhost:10001/default", "localhost", "10001", "default"},
+    {"jdbc:hive://localhost/notdefault", "localhost", "10000", "notdefault"},
+    {"jdbc:hive://foo:1243", "foo", "1243", "default"}
+  };
+
+  public void testDriverProperties() throws SQLException {
+    HiveDriver driver = new HiveDriver();
+
+    for (String[] testValues : URL_PROPERTIES) {
+      DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
+      assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
+      assertDpi(dpi[0], "HOST", testValues[1]);
+      assertDpi(dpi[1], "PORT", testValues[2]);
+      assertDpi(dpi[2], "DBNAME", testValues[3]);
+    }
+
+  }
+
+  private static void assertDpi(DriverPropertyInfo dpi, String name, String value) {
+    assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);
+    assertEquals("Invalid DriverPropertyInfo value", value, dpi.value);
+    assertEquals("Invalid DriverPropertyInfo required", false, dpi.required);
+  }
+}
SQLException("Inrecognized column type: " + type); } /* (non-Javadoc) @@ -81,7 +122,29 @@ */ public String getColumnTypeName(int column) throws SQLException { - return columnTypes.get(column-1); + if (columnTypes == null) + throw new SQLException("Could not determine column type name for ResultSet"); + + if (column < 1 || column > columnTypes.size()) + throw new SQLException("Invalid column value: " + column); + + // we need to convert the thrift type to the SQL type name + //TODO: this would be better handled in an enum + String type = columnTypes.get(column-1); + if ("string".equals(type)) + return Constants.STRING_TYPE_NAME; + else if ("double".equals(type)) + return Constants.DOUBLE_TYPE_NAME; + else if ("bool".equals(type)) + return Constants.BOOLEAN_TYPE_NAME; + else if ("byte".equals(type)) + return Constants.TINYINT_TYPE_NAME; + else if ("i32".equals(type)) + return Constants.INT_TYPE_NAME; + else if ("i64".equals(type)) + return Constants.BIGINT_TYPE_NAME; + + throw new SQLException("Inrecognized column type: " + type); } /* (non-Javadoc) @@ -89,8 +152,9 @@ */ public int getPrecision(int column) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + if (Types.DOUBLE == getColumnType(column)) return -1; //Do we have a precision limit? + + return 0; } /* (non-Javadoc) @@ -98,8 +162,9 @@ */ public int getScale(int column) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + if (Types.DOUBLE == getColumnType(column)) return -1; //Do we have a scale limit? + + return 0; } /* (non-Javadoc) @@ -125,8 +190,8 @@ */ public boolean isAutoIncrement(int column) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + // Hive doesn't have an auto-increment concept + return false; } /* (non-Javadoc) @@ -143,8 +208,8 @@ */ public boolean isCurrency(int column) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + // Hive doesn't support a currency type + return false; } /* (non-Javadoc) @@ -161,8 +226,8 @@ */ public int isNullable(int column) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + // Hive doesn't have the concept of not-null + return ResultSetMetaData.columnNullable; } /* (non-Javadoc) Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (revision 796955) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Thu Jul 23 10:52:08 PDT 2009 @@ -35,6 +35,31 @@ private static final boolean JDBC_COMPLIANT = false; /** + * The required prefix for the connection url + */ + private static final String URL_PREFIX = "jdbc:hive://"; + + /** + * If host is provided, without a port + */ + private static final String DEFAULT_PORT = "10000"; + + /** + * Property key for the database name + */ + private static final String DBNAME_PROPERTY_KEY = "DBNAME"; + + /** + * Property key for the Hive Server host + */ + private static final String HOST_PROPERTY_KEY = "HOST"; + + /** + * Property key for the Hive Server port + */ + private static final String PORT_PROPERTY_KEY = "PORT"; + + /** * */ public HiveDriver() { @@ -60,7 +85,7 @@ */ public boolean acceptsURL(String url) throws SQLException { - return Pattern.matches("jdbc:hive://", url); + return Pattern.matches(URL_PREFIX, 
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java	(revision 796955)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java	Thu Jul 23 10:52:08 PDT 2009
@@ -35,6 +35,31 @@
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
+   * The required prefix for the connection url
+   */
+  private static final String URL_PREFIX = "jdbc:hive://";
+
+  /**
+   * If host is provided, without a port
+   */
+  private static final String DEFAULT_PORT = "10000";
+
+  /**
+   * Property key for the database name
+   */
+  private static final String DBNAME_PROPERTY_KEY = "DBNAME";
+
+  /**
+   * Property key for the Hive Server host
+   */
+  private static final String HOST_PROPERTY_KEY = "HOST";
+
+  /**
+   * Property key for the Hive Server port
+   */
+  private static final String PORT_PROPERTY_KEY = "PORT";
+
+  /**
    *
    */
   public HiveDriver() {
@@ -60,7 +85,7 @@
    */
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches("jdbc:hive://", url);
+    return Pattern.matches(URL_PREFIX, url);
   }
 
@@ -88,13 +113,40 @@
     return MINOR_VERSION;
   }
-
   public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (info == null) {
+      info = new Properties();
-  }
+    }
+
+    if ((url != null) && url.startsWith(URL_PREFIX)) {
+      info = parseURL(url, info);
+    }
+
+    DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
+        info.getProperty(HOST_PROPERTY_KEY, ""));
+    hostProp.required = false;
+    hostProp.description = "Hostname of Hive Server";
+
+    DriverPropertyInfo portProp = new DriverPropertyInfo(PORT_PROPERTY_KEY,
+        info.getProperty(PORT_PROPERTY_KEY, ""));
+    portProp.required = false;
+    portProp.description = "Port number of Hive Server";
+
+    DriverPropertyInfo dbProp = new DriverPropertyInfo(DBNAME_PROPERTY_KEY,
+        info.getProperty(DBNAME_PROPERTY_KEY, "default"));
+    dbProp.required = false;
+    dbProp.description = "Database name";
+
+    DriverPropertyInfo[] dpi = new DriverPropertyInfo[3];
+
+    dpi[0] = hostProp;
+    dpi[1] = portProp;
+    dpi[2] = dbProp;
+
+    return dpi;
+  }
+
   /**
   * Returns whether the driver is JDBC compliant.
   */
@@ -103,4 +155,49 @@
     return JDBC_COMPLIANT;
   }
 
+  /**
+   * Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and parses it.
+   * Everything after jdbc:hive// is optional.
+   *
+   * @param url
+   * @param defaults
+   * @return
+   * @throws java.sql.SQLException
+   */
+  private Properties parseURL(String url, Properties defaults)
+      throws java.sql.SQLException {
+    Properties urlProps = (defaults != null) ? new Properties(defaults)
+        : new Properties();
+
+    if (url == null || !url.startsWith(URL_PREFIX)) {
+      throw new SQLException("Invalid connection url: " + url);
-}
+    }
+
+    if (url.length() <= URL_PREFIX.length()) return urlProps;
+
+    // [hostname]:[port]/[db_name]
+    String connectionInfo = url.substring(URL_PREFIX.length());
+
+    // [hostname]:[port] [db_name]
+    String[] hostPortAndDatabase = connectionInfo.split("/", 2);
+
+    // [hostname]:[port]
+    if (hostPortAndDatabase[0].length() > 0) {
+      String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
+      urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
+      if (hostAndPort.length > 1) {
+        urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
+      }
+      else {
+        urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
+      }
+    }
+
+    // [db_name]
+    if (hostPortAndDatabase.length > 1) {
+      urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
+    }
+
+    return urlProps;
+  }
+}
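To illustrate the URL rules documented in parseURL (everything after jdbc:hive:// is optional, the port defaults to 10000, the database to "default"), a short sketch mirroring the URL_PROPERTIES cases in the test:

    HiveDriver driver = new HiveDriver();
    DriverPropertyInfo[] dpi = driver.getPropertyInfo("jdbc:hive://localhost/notdefault", null);
    for (DriverPropertyInfo p : dpi) {
      System.out.println(p.name + " = " + p.value);
    }
    // prints: HOST = localhost, PORT = 10000 (DEFAULT_PORT), DBNAME = notdefault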