Index: service/src/java/org/apache/hadoop/hive/service/HiveServer.java
===================================================================
--- service/src/java/org/apache/hadoop/hive/service/HiveServer.java	(revision 786634)
+++ service/src/java/org/apache/hadoop/hive/service/HiveServer.java	(working copy)
@@ -118,9 +118,16 @@
      */
     public String getSchema() throws HiveServerException, TException {
       try {
-        return driver.getSchema();
+        String schema = driver.getSchema();
+        if (schema == null) {
+          schema = "";
+        }
+        LOG.info("Returning schema: " + schema);
+        return schema;
       } catch (Exception e) {
+        LOG.error(e.toString());
+        e.printStackTrace();
         throw new HiveServerException("Unable to get schema: " + e.toString());
       }
     }
@@ -210,8 +217,8 @@
       TServer server = new TThreadPoolServer(processor, serverTransport,
           new TTransportFactory(), new TTransportFactory(),
           new TBinaryProtocol.Factory(), new TBinaryProtocol.Factory(), options);
+      HiveServerHandler.LOG.info("Starting hive server on port " + port);
       server.serve();
-      HiveServerHandler.LOG.info("Started the new hive server on port " + port);
     } catch (Exception x) {
       x.printStackTrace();
     }
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java	(revision 786634)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java	(working copy)
@@ -246,8 +246,7 @@
    */
   public String getDriverName() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "hive";
   }
 
   /* (non-Javadoc)
@@ -255,8 +254,7 @@
    */
   public String getDriverVersion() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "0";
   }
 
   /* (non-Javadoc)
@@ -332,8 +330,7 @@
    */
   public int getJDBCMajorVersion() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return 3;
   }
 
   /* (non-Javadoc)
@@ -341,8 +338,7 @@
    */
   public int getJDBCMinorVersion() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return 0;
  }
 
   /* (non-Javadoc)
@@ -1383,8 +1379,7 @@
 
   public boolean supportsResultSetHoldability(int holdability)
       throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1392,8 +1387,7 @@
    */
   public boolean supportsResultSetType(int type) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return true;
   }
 
   /* (non-Javadoc)
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java	(revision 786634)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java	(working copy)
@@ -25,14 +25,21 @@
 import java.sql.Timestamp;
 import java.util.Calendar;
 
+import org.apache.hadoop.hive.service.HiveInterface;
+
 public class HivePreparedStatement implements java.sql.PreparedStatement {
 
   String sql;
+  JdbcSessionState session;
+  HiveInterface client;
+
   /**
    *
    */
-  public HivePreparedStatement(String sql) {
+  public HivePreparedStatement(JdbcSessionState session, HiveInterface client, String sql) {
+    this.session = session;
+    this.client = client;
     this.sql = sql;
   }
 
@@ -51,7 +58,7 @@
 
   public void clearParameters() throws SQLException {
     // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //throw new SQLException("Method not supported");
   }
 
   /* (non-Javadoc)
@@ -68,8 +75,12 @@
    */
   public ResultSet executeQuery() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    try {
+      client.execute(sql);
+    } catch (Exception ex) {
+      throw new SQLException(ex.toString());
+    }
+    return new HiveResultSet(client);
   }
 
   /* (non-Javadoc)
@@ -901,7 +912,7 @@
 
   public void setMaxRows(int max) throws SQLException {
     // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //throw new SQLException("Method not supported");
   }
 
   /* (non-Javadoc)
@@ -919,7 +930,7 @@
 
   public void setQueryTimeout(int seconds) throws SQLException {
     // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    // throw new SQLException("Method not supported");
   }
 
   /* (non-Javadoc)
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java	(revision 786634)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java	(working copy)
@@ -21,6 +21,7 @@
 import java.sql.Statement;
 import java.sql.Time;
 import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Map;
 import java.util.List;
@@ -38,6 +39,8 @@
   HiveInterface client;
   ArrayList row;
   DynamicSerDe ds;
+  List<String> columnNames;
+  List<String> columnTypes;
 
   /**
    *
@@ -54,15 +57,23 @@
    */
   public void initDynamicSerde() {
     try {
+      String fullSchema = client.getSchema();
+      String[] schemaParts = fullSchema.split("#");
+      if (schemaParts.length > 2) {
+        columnNames = Arrays.asList(schemaParts[1].split(","));
+        columnTypes = Arrays.asList(schemaParts[2].split(":"));
+      }
       ds = new DynamicSerDe();
       Properties dsp = new Properties();
       dsp.setProperty(Constants.SERIALIZATION_FORMAT,
           org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
       dsp.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "result");
-      dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema());
+      dsp.setProperty(Constants.SERIALIZATION_DDL, schemaParts[0]);
       dsp.setProperty(Constants.SERIALIZATION_LIB, ds.getClass().toString());
       dsp.setProperty(Constants.FIELD_DELIM, "9");
       ds.initialize(new Configuration(), dsp);
     } catch (Exception ex) {
+      ex.printStackTrace();
+      System.exit(1); // TODO: Decide what to do here.
     }
   }
@@ -547,8 +558,7 @@
    */
   public ResultSetMetaData getMetaData() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return new HiveResultSetMetaData(columnNames, columnTypes);
   }
 
   /* (non-Javadoc)
@@ -989,6 +999,7 @@
         row = (ArrayList)o;
       }
     } catch (Exception ex) {
+      ex.printStackTrace();
       throw new SQLException("Error retrieving next row");
     }
     // NOTE: fetchOne dosn't
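
The schema string that ties these pieces together has the form <thrift DDL>#<comma-separated column names>#<colon-separated column types>: Driver.getSchema() builds it via MetaStoreUtils.getFullDDLFromFieldSchema() further down in this patch, HiveServer passes it through, and initDynamicSerde() above splits it apart. A minimal sketch of that convention follows; the literal DDL, column names, and class name are made up for illustration and are not part of the patch.

import java.util.Arrays;
import java.util.List;

public class SchemaStringSketch {
  public static void main(String[] args) {
    // Illustrative schema string only: <DDL>#<names>#<types>.
    // Real values come from MetaStoreUtils.getFullDDLFromFieldSchema().
    String fullSchema = "struct result { i32 cnt, string word}#cnt,word#int:string";

    String[] schemaParts = fullSchema.split("#");
    String ddl = schemaParts[0];                                    // handed to DynamicSerDe as SERIALIZATION_DDL
    List<String> names = Arrays.asList(schemaParts[1].split(","));  // backs getColumnName()/getColumnLabel()
    List<String> types = Arrays.asList(schemaParts[2].split(":"));  // backs getColumnTypeName()

    System.out.println(ddl);    // struct result { i32 cnt, string word}
    System.out.println(names);  // [cnt, word]
    System.out.println(types);  // [int, string]
  }
}
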
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java	(revision 786634)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java	(working copy)
@@ -4,9 +4,16 @@
 package org.apache.hadoop.hive.jdbc;
 
 import java.sql.SQLException;
+import java.util.List;
-
 public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
+  List<String> columnNames;
+  List<String> columnTypes;
+
+  public HiveResultSetMetaData(List<String> columnNames, List<String> columnTypes) {
+    this.columnNames = columnNames;
+    this.columnTypes = columnTypes;
+  }
 
   /* (non-Javadoc)
    * @see java.sql.ResultSetMetaData#getCatalogName(int)
@@ -31,8 +38,7 @@
    */
   public int getColumnCount() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return columnNames.size();
   }
 
   /* (non-Javadoc)
@@ -50,7 +56,7 @@
 
   public String getColumnLabel(int column) throws SQLException {
     // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return columnNames.get(column-1);
   }
 
   /* (non-Javadoc)
@@ -58,8 +64,7 @@
    */
   public String getColumnName(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return columnNames.get(column-1);
   }
 
   /* (non-Javadoc)
@@ -76,8 +81,7 @@
    */
   public String getColumnTypeName(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return columnTypes.get(column-1);
   }
 
   /* (non-Javadoc)
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java	(revision 786634)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java	(working copy)
@@ -242,8 +242,7 @@
    */
   public DatabaseMetaData getMetaData() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return new HiveDatabaseMetaData();
   }
 
   /* (non-Javadoc)
@@ -344,7 +343,7 @@
    */
   public PreparedStatement prepareStatement(String sql) throws SQLException {
-    return new HivePreparedStatement(sql);
+    return new HivePreparedStatement(session, client, sql);
   }
 
   /* (non-Javadoc)
@@ -353,7 +352,7 @@
 
   public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
       throws SQLException {
-    return new HivePreparedStatement(sql);
+    return new HivePreparedStatement(session, client, sql);
   }
 
   /* (non-Javadoc)
@@ -382,8 +381,7 @@
 
   public PreparedStatement prepareStatement(String sql, int resultSetType,
       int resultSetConcurrency) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return new HivePreparedStatement(session, client, sql);
   }
 
   /* (non-Javadoc)
Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java	(revision 786634)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java	(working copy)
@@ -400,6 +400,31 @@
     }
     return thriftType.toString();
   }
+
+  public static String getFullDDLFromFieldSchema(String structName, List<FieldSchema> fieldSchemas) {
+    StringBuilder ddl = new StringBuilder();
+    ddl.append(getDDLFromFieldSchema(structName, fieldSchemas));
+    ddl.append('#');
+    StringBuilder colnames = new StringBuilder();
+    StringBuilder coltypes = new StringBuilder();
+    boolean first = true;
+    for (FieldSchema col: fieldSchemas) {
+      if (first) {
+        first = false;
+      }
+      else {
+        colnames.append(',');
+        coltypes.append(':');
+      }
+      colnames.append(col.getName());
+      coltypes.append(col.getType());
+    }
+    ddl.append(colnames);
+    ddl.append('#');
+    ddl.append(coltypes);
+    return ddl.toString();
+  }
+
   /** Convert FieldSchemas to Thrift DDL. */
   public static String getDDLFromFieldSchema(String structName,
       List<FieldSchema> fieldSchemas) {
@@ -419,6 +444,7 @@
       ddl.append(col.getName());
     }
     ddl.append("}");
+    LOG.info("DDL: " + ddl);
     return ddl.toString();
   }
 
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 786634)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -86,23 +86,33 @@
    * Return the Thrift DDL string of the result
    */
   public String getSchema() throws Exception {
-    if (plan != null && plan.getPlan().getFetchTask() != null) {
-      BaseSemanticAnalyzer sem = plan.getPlan();
+    String schema = "";
+    try {
+      if (plan != null && plan.getPlan().getFetchTask() != null) {
+        BaseSemanticAnalyzer sem = plan.getPlan();
 
-      if (!sem.getFetchTaskInit()) {
-        sem.setFetchTaskInit(true);
-        sem.getFetchTask().initialize(conf);
+        if (!sem.getFetchTaskInit()) {
+          sem.setFetchTaskInit(true);
+          sem.getFetchTask().initialize(conf);
+        }
+        FetchTask ft = (FetchTask) sem.getFetchTask();
+
+        tableDesc td = ft.getTblDesc();
+        String tableName = "result";
+        List<FieldSchema> lst = MetaStoreUtils.getFieldsFromDeserializer(
+            tableName, td.getDeserializer());
+        schema = MetaStoreUtils.getFullDDLFromFieldSchema(tableName, lst);
       }
-      FetchTask ft = (FetchTask) sem.getFetchTask();
-
-      tableDesc td = ft.getTblDesc();
-      String tableName = "result";
-      List<FieldSchema> lst = MetaStoreUtils.getFieldsFromDeserializer(
-          tableName, td.getDeserializer());
-      String schema = MetaStoreUtils.getDDLFromFieldSchema(tableName, lst);
-      return schema;
+      else {
+        schema = "struct result { string empty }";
+      }
     }
-    return null;
+    catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+    LOG.info("Returning schema: " + schema);
+    return schema;
   }
 
   /**
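
Taken together, these changes are enough for a basic JDBC round trip. A minimal client sketch follows, assuming a Hive server is listening on localhost:10000, the driver class org.apache.hadoop.hive.jdbc.HiveDriver is on the classpath, and a table named src with columns key and value exists; host, port, database, and table are placeholders, not part of this patch, and only the metadata calls shown are introduced here, the rest of the JDBC surface is assumed to behave as in the existing driver.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;

public class HiveJdbcSketch {
  public static void main(String[] args) throws Exception {
    // Register the driver and open a connection (URL and credentials are placeholders).
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");

    // prepareStatement() now passes the session and Thrift client into the statement,
    // so executeQuery() can run the SQL through HiveInterface.execute().
    PreparedStatement stmt = con.prepareStatement("SELECT key, value FROM src");
    ResultSet rs = stmt.executeQuery();

    // getMetaData() is backed by the column names/types parsed from the schema string.
    ResultSetMetaData md = rs.getMetaData();
    for (int i = 1; i <= md.getColumnCount(); i++) {
      System.out.println(md.getColumnName(i) + "\t" + md.getColumnTypeName(i));
    }
    while (rs.next()) {
      System.out.println(rs.getString(1) + "\t" + rs.getString(2));
    }
    con.close();
  }
}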