diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 4090e72..716e608 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -1400,53 +1400,53 @@ public void testResultSetMetaData() throws SQLException { assertTrue(colRS.next()); assertEquals("a", meta.getColumnName(5)); - assertEquals(Types.VARCHAR, meta.getColumnType(5)); - assertEquals("string", meta.getColumnTypeName(5)); + assertEquals(Types.ARRAY, meta.getColumnType(5)); + assertEquals("array", meta.getColumnTypeName(5)); assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(5)); assertEquals(Integer.MAX_VALUE, meta.getPrecision(5)); assertEquals(0, meta.getScale(5)); assertEquals("c5", colRS.getString("COLUMN_NAME")); - assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE")); + assertEquals(Types.ARRAY, colRS.getInt("DATA_TYPE")); assertEquals("array", colRS.getString("TYPE_NAME").toLowerCase()); assertTrue(colRS.next()); assertEquals("c6", meta.getColumnName(6)); - assertEquals(Types.VARCHAR, meta.getColumnType(6)); - assertEquals("string", meta.getColumnTypeName(6)); + assertEquals(Types.JAVA_OBJECT, meta.getColumnType(6)); + assertEquals("map", meta.getColumnTypeName(6)); assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(6)); assertEquals(Integer.MAX_VALUE, meta.getPrecision(6)); assertEquals(0, meta.getScale(6)); assertEquals("c6", colRS.getString("COLUMN_NAME")); - assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE")); + assertEquals(Types.JAVA_OBJECT, colRS.getInt("DATA_TYPE")); assertEquals("map", colRS.getString("TYPE_NAME").toLowerCase()); assertTrue(colRS.next()); assertEquals("c7", meta.getColumnName(7)); - assertEquals(Types.VARCHAR, meta.getColumnType(7)); - assertEquals("string", meta.getColumnTypeName(7)); + assertEquals(Types.JAVA_OBJECT, meta.getColumnType(7)); + 
assertEquals("map", meta.getColumnTypeName(7)); assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(7)); assertEquals(Integer.MAX_VALUE, meta.getPrecision(7)); assertEquals(0, meta.getScale(7)); assertEquals("c7", colRS.getString("COLUMN_NAME")); - assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE")); + assertEquals(Types.JAVA_OBJECT, colRS.getInt("DATA_TYPE")); assertEquals("map", colRS.getString("TYPE_NAME").toLowerCase()); assertTrue(colRS.next()); assertEquals("c8", meta.getColumnName(8)); - assertEquals(Types.VARCHAR, meta.getColumnType(8)); - assertEquals("string", meta.getColumnTypeName(8)); + assertEquals(Types.STRUCT, meta.getColumnType(8)); + assertEquals("struct", meta.getColumnTypeName(8)); assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(8)); assertEquals(Integer.MAX_VALUE, meta.getPrecision(8)); assertEquals(0, meta.getScale(8)); assertEquals("c8", colRS.getString("COLUMN_NAME")); - assertEquals(Types.VARCHAR, colRS.getInt("DATA_TYPE")); + assertEquals(Types.STRUCT, colRS.getInt("DATA_TYPE")); assertEquals("struct", colRS.getString("TYPE_NAME").toLowerCase()); assertTrue(colRS.next()); @@ -1517,8 +1517,8 @@ public void testResultSetMetaData() throws SQLException { assertEquals(0, meta.getScale(13)); assertEquals("b", meta.getColumnName(14)); - assertEquals(Types.VARCHAR, meta.getColumnType(14)); - assertEquals("string", meta.getColumnTypeName(14)); + assertEquals(Types.ARRAY, meta.getColumnType(14)); + assertEquals("array", meta.getColumnTypeName(14)); assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(14)); assertEquals(Integer.MAX_VALUE, meta.getPrecision(14)); assertEquals(0, meta.getScale(14)); diff --git jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java index 257c88a..fa8eb88 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java @@ -410,6 +410,11 @@ private Object evaluate(Type 
type, Object value) { return new BigDecimal((String)value); case DATE_TYPE: return Date.valueOf((String) value); + case ARRAY_TYPE: + case MAP_TYPE: + case STRUCT_TYPE: + // todo: returns json string. should recreate object from it? + return value; default: return value; } diff --git jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java index e89571f..dc364e1 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java @@ -20,11 +20,8 @@ import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hive.serde.serdeConstants; - /** * HiveResultSetMetaData. * @@ -47,7 +44,8 @@ public String getCatalogName(int column) throws SQLException { } public String getColumnClassName(int column) throws SQLException { - throw new SQLException("Method not supported"); + int columnType = getColumnType(column); + return JdbcColumn.columnClassName(columnType, columnAttributes.get(toZeroIndex(column))); } public int getColumnCount() throws SQLException { @@ -57,91 +55,39 @@ public int getColumnCount() throws SQLException { public int getColumnDisplaySize(int column) throws SQLException { int columnType = getColumnType(column); - return JdbcColumn.columnDisplaySize(columnType, columnAttributes.get(column - 1)); + return JdbcColumn.columnDisplaySize(columnType, columnAttributes.get(toZeroIndex(column))); } public String getColumnLabel(int column) throws SQLException { - return columnNames.get(column - 1); + return columnNames.get(toZeroIndex(column)); } public String getColumnName(int column) throws SQLException { - return columnNames.get(column - 1); + return columnNames.get(toZeroIndex(column)); } public int getColumnType(int column) throws SQLException { - if (columnTypes == null) { - throw new SQLException( - "Could not determine column type name for 
ResultSet"); - } - - if (column < 1 || column > columnTypes.size()) { - throw new SQLException("Invalid column value: " + column); - } - // we need to convert the thrift type to the SQL type - String type = columnTypes.get(column - 1); + String type = columnTypes.get(toZeroIndex(column)); // we need to convert the thrift type to the SQL type - return Utils.hiveTypeToSqlType(type); + return JdbcColumn.hiveTypeToSqlType(type); } public String getColumnTypeName(int column) throws SQLException { - validateColumnType(column); - - // we need to convert the Hive type to the SQL type name - // TODO: this would be better handled in an enum - String type = columnTypes.get(column - 1); - if ("string".equalsIgnoreCase(type)) { - return serdeConstants.STRING_TYPE_NAME; - } else if ("varchar".equalsIgnoreCase(type)) { - return serdeConstants.VARCHAR_TYPE_NAME; - } else if ("char".equalsIgnoreCase(type)) { - return serdeConstants.CHAR_TYPE_NAME; - } else if ("float".equalsIgnoreCase(type)) { - return serdeConstants.FLOAT_TYPE_NAME; - } else if ("double".equalsIgnoreCase(type)) { - return serdeConstants.DOUBLE_TYPE_NAME; - } else if ("boolean".equalsIgnoreCase(type)) { - return serdeConstants.BOOLEAN_TYPE_NAME; - } else if ("tinyint".equalsIgnoreCase(type)) { - return serdeConstants.TINYINT_TYPE_NAME; - } else if ("smallint".equalsIgnoreCase(type)) { - return serdeConstants.SMALLINT_TYPE_NAME; - } else if ("int".equalsIgnoreCase(type)) { - return serdeConstants.INT_TYPE_NAME; - } else if ("bigint".equalsIgnoreCase(type)) { - return serdeConstants.BIGINT_TYPE_NAME; - } else if ("timestamp".equalsIgnoreCase(type)) { - return serdeConstants.TIMESTAMP_TYPE_NAME; - } else if ("date".equalsIgnoreCase(type)) { - return serdeConstants.DATE_TYPE_NAME; - } else if ("decimal".equalsIgnoreCase(type)) { - return serdeConstants.DECIMAL_TYPE_NAME; - } else if ("binary".equalsIgnoreCase(type)) { - return serdeConstants.BINARY_TYPE_NAME; - } else if ("void".equalsIgnoreCase(type)) { - return 
serdeConstants.VOID_TYPE_NAME; - } else if (type.startsWith("map<")) { - return serdeConstants.STRING_TYPE_NAME; - } else if (type.startsWith("array<")) { - return serdeConstants.STRING_TYPE_NAME; - } else if (type.startsWith("struct<")) { - return serdeConstants.STRING_TYPE_NAME; - } - - throw new SQLException("Unrecognized column type: " + type); + return JdbcColumn.getColumnTypeName(columnTypes.get(toZeroIndex(column))); } public int getPrecision(int column) throws SQLException { int columnType = getColumnType(column); - return JdbcColumn.columnPrecision(columnType, columnAttributes.get(column - 1)); + return JdbcColumn.columnPrecision(columnType, columnAttributes.get(toZeroIndex(column))); } public int getScale(int column) throws SQLException { int columnType = getColumnType(column); - return JdbcColumn.columnScale(columnType, columnAttributes.get(column - 1)); + return JdbcColumn.columnScale(columnType, columnAttributes.get(toZeroIndex(column))); } public String getSchemaName(int column) throws SQLException { @@ -158,11 +104,9 @@ public boolean isAutoIncrement(int column) throws SQLException { } public boolean isCaseSensitive(int column) throws SQLException { - validateColumnType(column); - // we need to convert the Hive type to the SQL type name // TODO: this would be better handled in an enum - String type = columnTypes.get(column - 1); + String type = columnTypes.get(toZeroIndex(column)); if("string".equalsIgnoreCase(type)) { return true; @@ -209,14 +153,14 @@ public boolean isWrapperFor(Class iface) throws SQLException { throw new SQLException("Method not supported"); } - protected void validateColumnType(int column) throws SQLException { + protected int toZeroIndex(int column) throws SQLException { if (columnTypes == null) { throw new SQLException( "Could not determine column type name for ResultSet"); } - if (column < 1 || column > columnTypes.size()) { throw new SQLException("Invalid column value: " + column); - } + } + return column - 1; } } diff --git 
jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java index 42ec32a..2c38994 100644 --- jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java +++ jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java @@ -18,7 +18,12 @@ package org.apache.hive.jdbc; +import org.apache.hadoop.hive.serde.serdeConstants; + +import java.math.BigDecimal; +import java.sql.Date; import java.sql.SQLException; +import java.sql.Timestamp; import java.sql.Types; @@ -59,8 +64,125 @@ public String getType() { return type; } - public Integer getSqlType() throws SQLException { - return Utils.hiveTypeToSqlType(type); + static String columnClassName(int columnType, JdbcColumnAttributes columnAttributes) + throws SQLException { + // according to hiveTypeToSqlType possible options are: + switch(columnType) { + case Types.BOOLEAN: + return Boolean.class.getName(); + case Types.CHAR: + case Types.VARCHAR: + return String.class.getName(); + case Types.TINYINT: + return Byte.class.getName(); + case Types.SMALLINT: + return Short.class.getName(); + case Types.INTEGER: + return Integer.class.getName(); + case Types.BIGINT: + return Long.class.getName(); + case Types.DATE: + return Date.class.getName(); + case Types.FLOAT: + return Float.class.getName(); + case Types.DOUBLE: + return Double.class.getName(); + case Types.TIMESTAMP: + return Timestamp.class.getName(); + case Types.DECIMAL: + // JDBC maps DECIMAL to java.math.BigDecimal, and HiveBaseResultSet.evaluate + // materializes DECIMAL_TYPE values as BigDecimal, not BigInteger. + return BigDecimal.class.getName(); + case Types.BINARY: + return byte[].class.getName(); + case Types.JAVA_OBJECT: + case Types.ARRAY: + case Types.STRUCT: + return String.class.getName(); + default: + throw new SQLException("Invalid column type: " + columnType); + } + } + + public static int hiveTypeToSqlType(String type) throws SQLException { + if ("string".equalsIgnoreCase(type)) { + return Types.VARCHAR; + } else if ("varchar".equalsIgnoreCase(type)) { + return Types.VARCHAR; + } else if ("char".equalsIgnoreCase(type)) { + return Types.CHAR; + } else if 
("float".equalsIgnoreCase(type)) { + return Types.FLOAT; + } else if ("double".equalsIgnoreCase(type)) { + return Types.DOUBLE; + } else if ("boolean".equalsIgnoreCase(type)) { + return Types.BOOLEAN; + } else if ("tinyint".equalsIgnoreCase(type)) { + return Types.TINYINT; + } else if ("smallint".equalsIgnoreCase(type)) { + return Types.SMALLINT; + } else if ("int".equalsIgnoreCase(type)) { + return Types.INTEGER; + } else if ("bigint".equalsIgnoreCase(type)) { + return Types.BIGINT; + } else if ("date".equalsIgnoreCase(type)) { + return Types.DATE; + } else if ("timestamp".equalsIgnoreCase(type)) { + return Types.TIMESTAMP; + } else if ("decimal".equalsIgnoreCase(type)) { + return Types.DECIMAL; + } else if ("binary".equalsIgnoreCase(type)) { + return Types.BINARY; + } else if ("map".equalsIgnoreCase(type)) { + return Types.JAVA_OBJECT; + } else if ("array".equalsIgnoreCase(type)) { + return Types.ARRAY; + } else if ("struct".equalsIgnoreCase(type)) { + return Types.STRUCT; + } + throw new SQLException("Unrecognized column type: " + type); + } + + static String getColumnTypeName(String type) throws SQLException { + // we need to convert the Hive type to the SQL type name + // TODO: this would be better handled in an enum + if ("string".equalsIgnoreCase(type)) { + return serdeConstants.STRING_TYPE_NAME; + } else if ("varchar".equalsIgnoreCase(type)) { + return serdeConstants.VARCHAR_TYPE_NAME; + } else if ("char".equalsIgnoreCase(type)) { + return serdeConstants.CHAR_TYPE_NAME; + } else if ("float".equalsIgnoreCase(type)) { + return serdeConstants.FLOAT_TYPE_NAME; + } else if ("double".equalsIgnoreCase(type)) { + return serdeConstants.DOUBLE_TYPE_NAME; + } else if ("boolean".equalsIgnoreCase(type)) { + return serdeConstants.BOOLEAN_TYPE_NAME; + } else if ("tinyint".equalsIgnoreCase(type)) { + return serdeConstants.TINYINT_TYPE_NAME; + } else if ("smallint".equalsIgnoreCase(type)) { + return serdeConstants.SMALLINT_TYPE_NAME; + } else if 
("int".equalsIgnoreCase(type)) { + return serdeConstants.INT_TYPE_NAME; + } else if ("bigint".equalsIgnoreCase(type)) { + return serdeConstants.BIGINT_TYPE_NAME; + } else if ("timestamp".equalsIgnoreCase(type)) { + return serdeConstants.TIMESTAMP_TYPE_NAME; + } else if ("date".equalsIgnoreCase(type)) { + return serdeConstants.DATE_TYPE_NAME; + } else if ("decimal".equalsIgnoreCase(type)) { + return serdeConstants.DECIMAL_TYPE_NAME; + } else if ("binary".equalsIgnoreCase(type)) { + return serdeConstants.BINARY_TYPE_NAME; + } else if ("void".equalsIgnoreCase(type)) { + return serdeConstants.VOID_TYPE_NAME; + } else if (type.equalsIgnoreCase("map")) { + return serdeConstants.MAP_TYPE_NAME; + } else if (type.equalsIgnoreCase("array")) { + return serdeConstants.LIST_TYPE_NAME; + } else if (type.equalsIgnoreCase("struct")) { + return serdeConstants.STRUCT_TYPE_NAME; + } + + throw new SQLException("Unrecognized column type: " + type); } static int columnDisplaySize(int columnType, JdbcColumnAttributes columnAttributes) @@ -90,6 +212,11 @@ static int columnDisplaySize(int columnType, JdbcColumnAttributes columnAttribut return 25; // e.g. -(17#).e-#### case Types.DECIMAL: return columnPrecision(columnType, columnAttributes) + 2; // '-' sign and '.' 
+ case Types.BINARY: + case Types.JAVA_OBJECT: + case Types.ARRAY: + case Types.STRUCT: + return Integer.MAX_VALUE; default: throw new SQLException("Invalid column type: " + columnType); } @@ -125,6 +252,11 @@ static int columnPrecision(int columnType, JdbcColumnAttributes columnAttributes return 29; case Types.DECIMAL: return columnAttributes.precision; + case Types.BINARY: + case Types.JAVA_OBJECT: + case Types.ARRAY: + case Types.STRUCT: + return Integer.MAX_VALUE; default: throw new SQLException("Invalid column type: " + columnType); } @@ -151,6 +283,11 @@ static int columnScale(int columnType, JdbcColumnAttributes columnAttributes) return 9; case Types.DECIMAL: return columnAttributes.scale; + case Types.BINARY: + case Types.JAVA_OBJECT: + case Types.ARRAY: + case Types.STRUCT: + return 0; default: throw new SQLException("Invalid column type: " + columnType); } diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java index 913dc46..c1be950 100644 --- jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -104,52 +104,6 @@ public void setSessionVars(Map sessionVars) { } } - - /** - * Convert hive types to sql types. 
- * @param type - * @return Integer java.sql.Types values - * @throws SQLException - */ - public static int hiveTypeToSqlType(String type) throws SQLException { - if ("string".equalsIgnoreCase(type)) { - return Types.VARCHAR; - } else if ("varchar".equalsIgnoreCase(type)) { - return Types.VARCHAR; - } else if ("char".equalsIgnoreCase(type)) { - return Types.CHAR; - } else if ("float".equalsIgnoreCase(type)) { - return Types.FLOAT; - } else if ("double".equalsIgnoreCase(type)) { - return Types.DOUBLE; - } else if ("boolean".equalsIgnoreCase(type)) { - return Types.BOOLEAN; - } else if ("tinyint".equalsIgnoreCase(type)) { - return Types.TINYINT; - } else if ("smallint".equalsIgnoreCase(type)) { - return Types.SMALLINT; - } else if ("int".equalsIgnoreCase(type)) { - return Types.INTEGER; - } else if ("bigint".equalsIgnoreCase(type)) { - return Types.BIGINT; - } else if ("date".equalsIgnoreCase(type)) { - return Types.DATE; - } else if ("timestamp".equalsIgnoreCase(type)) { - return Types.TIMESTAMP; - } else if ("decimal".equalsIgnoreCase(type)) { - return Types.DECIMAL; - } else if ("binary".equalsIgnoreCase(type)) { - return Types.BINARY; - } else if (type.startsWith("map<")) { - return Types.VARCHAR; - } else if (type.startsWith("array<")) { - return Types.VARCHAR; - } else if (type.startsWith("struct<")) { - return Types.VARCHAR; - } - throw new SQLException("Unrecognized column type: " + type); - } - // Verify success or success_with_info status, else throw SQLException public static void verifySuccessWithInfo(TStatus status) throws SQLException { verifySuccess(status, true); diff --git service/src/java/org/apache/hive/service/cli/Type.java service/src/java/org/apache/hive/service/cli/Type.java index 9329392..90bd39d 100644 --- service/src/java/org/apache/hive/service/cli/Type.java +++ service/src/java/org/apache/hive/service/cli/Type.java @@ -76,24 +76,24 @@ TTypeId.DECIMAL_TYPE, true, false, false), ARRAY_TYPE("ARRAY", - java.sql.Types.VARCHAR, - 
TTypeId.STRING_TYPE, + java.sql.Types.ARRAY, + TTypeId.ARRAY_TYPE, true, true), MAP_TYPE("MAP", - java.sql.Types.VARCHAR, - TTypeId.STRING_TYPE, + java.sql.Types.JAVA_OBJECT, + TTypeId.MAP_TYPE, true, true), STRUCT_TYPE("STRUCT", - java.sql.Types.VARCHAR, - TTypeId.STRING_TYPE, + java.sql.Types.STRUCT, + TTypeId.STRUCT_TYPE, true, false), UNION_TYPE("UNIONTYPE", - java.sql.Types.VARCHAR, - TTypeId.STRING_TYPE, + java.sql.Types.OTHER, + TTypeId.UNION_TYPE, true, false), USER_DEFINED_TYPE("USER_DEFINED", - java.sql.Types.VARCHAR, - TTypeId.STRING_TYPE, + java.sql.Types.OTHER, + TTypeId.USER_DEFINED_TYPE, true, false); private final String name;