Index: jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java =================================================================== --- jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (revision 1426029) +++ jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (working copy) @@ -21,6 +21,7 @@ import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME; import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME; +import java.io.InputStream; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; @@ -148,7 +149,7 @@ + " c14 map>," + " c15 struct>," + " c16 array,n:int>>," - + " c17 timestamp) comment '"+dataTypeTableComment + + " c17 timestamp, c18 binary) comment '"+dataTypeTableComment +"' partitioned by (dt STRING)"); assertFalse(res.next()); @@ -209,7 +210,7 @@ ResultSet res = stmt.executeQuery( "explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + - "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1"); + "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1"); ResultSetMetaData md = res.getMetaData(); assertEquals(md.getColumnCount(), 1); // only one result column @@ -381,6 +382,7 @@ assertEquals("[]", res.getString(16)); assertEquals(null, res.getString(17)); assertEquals(null, res.getTimestamp(17)); + assertEquals(null, res.getBinaryStream(18)); // row 3 assertTrue(res.next()); @@ -403,6 +405,15 @@ assertEquals("2012-04-22 09:00:00.123456789", res.getString(17)); assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString()); + byte[] bytes = "X'01FF'".getBytes("UTF-8"); + InputStream resultSetInputStream = res.getBinaryStream(18); + int len = bytes.length; + byte[] b = new byte[len]; + assertEquals(len, resultSetInputStream.read(b, 0, len)); + for ( int i = 0; i< len; i++) { + assertEquals(bytes[i], b[i]); + } + // test getBoolean rules on non-boolean columns assertEquals(true, 
res.getBoolean(1)); assertEquals(true, res.getBoolean(4)); @@ -803,13 +814,13 @@ ResultSet res = stmt.executeQuery( "select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " + - "c1*2, sentences(null, null, null) as b, c17 from " + dataTypeTableName + " limit 1"); + "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1"); ResultSetMetaData meta = res.getMetaData(); ResultSet colRS = con.getMetaData().getColumns(null, null, dataTypeTableName.toLowerCase(), null); - assertEquals(15, meta.getColumnCount()); + assertEquals(16, meta.getColumnCount()); assertTrue(colRS.next()); @@ -1012,6 +1023,13 @@ assertEquals(29, meta.getPrecision(15)); assertEquals(9, meta.getScale(15)); + assertEquals("c18", meta.getColumnName(16)); + assertEquals(Types.BINARY, meta.getColumnType(16)); + assertEquals("binary", meta.getColumnTypeName(16)); + assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(16)); + assertEquals(Integer.MAX_VALUE, meta.getPrecision(16)); + assertEquals(0, meta.getScale(16)); + for (int i = 1; i <= meta.getColumnCount(); i++) { assertFalse(meta.isAutoIncrement(i)); assertFalse(meta.isCurrency(i)); Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java (revision 1426029) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java (working copy) @@ -68,6 +68,7 @@ case Types.BOOLEAN: return columnPrecision(columnType); case Types.VARCHAR: + case Types.BINARY: return Integer.MAX_VALUE; // hive has no max limit for strings case Types.TINYINT: case Types.SMALLINT: @@ -93,6 +94,7 @@ case Types.BOOLEAN: return 1; case Types.VARCHAR: + case Types.BINARY: return Integer.MAX_VALUE; // hive has no max limit for strings case Types.TINYINT: return 3; @@ -118,6 +120,7 @@ switch(columnType) { case Types.BOOLEAN: case Types.VARCHAR: + case Types.BINARY: case Types.TINYINT: case Types.SMALLINT: 
case Types.INTEGER: Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (revision 1426029) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (working copy) @@ -48,6 +48,8 @@ return Types.BIGINT; } else if ("timestamp".equalsIgnoreCase(type)) { return Types.TIMESTAMP; + } else if ("binary".equalsIgnoreCase(type)) { + return Types.BINARY; } else if (type.startsWith("map<")) { return Types.VARCHAR; } else if (type.startsWith("array<")) { Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java (revision 1426029) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java (working copy) @@ -18,8 +18,10 @@ package org.apache.hadoop.hive.jdbc; +import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.Reader; +import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.net.URL; import java.sql.Array; @@ -118,11 +120,35 @@ } public InputStream getBinaryStream(int columnIndex) throws SQLException { - throw new SQLException("Method not supported"); + Object obj = getObject(columnIndex); + if (obj == null) { + return null; + } + else if (obj instanceof InputStream) { + return (InputStream)obj; + } + else if (obj instanceof byte[] ) { + byte[] byteArray = (byte[])obj; + InputStream is = new ByteArrayInputStream(byteArray); + return is; + } + else if (obj instanceof String) { + String str = (String)obj; + InputStream is = null; + try { + is = new ByteArrayInputStream(str.getBytes("UTF-8")); + } catch (UnsupportedEncodingException e) { + // UTF-8 is a required charset on every JVM, so this is effectively unreachable. + throw new SQLException("Illegal conversion - Unsupported encoding exception", e); + } + return is; + } + throw new SQLException("Illegal conversion"); } public InputStream 
getBinaryStream(String columnName) throws SQLException { - throw new SQLException("Method not supported"); + return getBinaryStream(findColumn(columnName)); + // Column-name access delegates to the index-based implementation above. } public Blob getBlob(int i) throws SQLException { Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (revision 1426029) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (working copy) @@ -41,6 +41,7 @@ import java.sql.Timestamp; import java.util.Calendar; import java.util.HashMap; +import java.util.Scanner; import org.apache.hadoop.hive.service.HiveInterface; import org.apache.hadoop.hive.service.HiveServerException; @@ -325,8 +326,8 @@ */ public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + Scanner scanner = new Scanner(x, "UTF-8").useDelimiter("\\A"); + this.parameters.put(parameterIndex, scanner.hasNext() ? scanner.next() : ""); } /* @@ -337,7 +338,7 @@ */ public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { - // TODO Auto-generated method stub + //TODO Auto-generated method stub throw new SQLException("Method not supported"); } Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java =================================================================== --- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (revision 1426029) +++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (working copy) @@ -112,6 +112,8 @@ return serdeConstants.BIGINT_TYPE_NAME; } else if ("timestamp".equalsIgnoreCase(type)) { return serdeConstants.TIMESTAMP_TYPE_NAME; + } else if ("binary".equalsIgnoreCase(type)) { + return serdeConstants.BINARY_TYPE_NAME; } else if (type.startsWith("map<")) { return 
serdeConstants.STRING_TYPE_NAME; } else if (type.startsWith("array<")) { Index: data/files/datatypes.txt =================================================================== --- data/files/datatypes.txt (revision 1426029) +++ data/files/datatypes.txt (working copy) @@ -1,3 +1,3 @@ -\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N --1false-1.1\N\N\N-1-1-1.0-1\N\N\N -1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789 +\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N +-1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N +1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789X'01FF'