diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 28d8f52..479d545 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -705,6 +705,12 @@
     HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
     HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
 
+    // Whether to do space padding and indentation for human readability.
+    // Controls 'describe table' and 'show columns' output, but formatting
+    // keywords in the query can override this.
+    // Set automatically by HiveServer2.
+    HIVE_HUMAN_FRIENDLY_FORMAT("hive.human.friendly.format", true),
+
     HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
     HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),

diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
index e492663..8a58bb8 100644
--- a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -820,17 +820,39 @@ public void testDescribeTable() throws SQLException {
     ResultSet res = stmt.executeQuery("describe " + tableName);
 
     res.next();
-    assertEquals("Column name 'under_col' not found", "under_col", res.getString(1).trim());
-    assertEquals("Column type 'under_col' for column under_col not found", "int", res
-        .getString(2).trim());
+    assertEquals("Column name 'under_col' not found", "under_col", res.getString(1));
+    assertEquals("Column type 'under_col' for column under_col not found",
+        "int", res.getString(2));
+    assertEquals("Column comment for column under_col not found",
+        "the under column", res.getString(3));
+
     res.next();
     assertEquals("Column name 'value' not found", "value", res.getString(1).trim());
     assertEquals("Column type 'string' for column key not found", "string", res
-        .getString(2).trim());
+        .getString(2));
+    assertEquals("Column comment for key not found",
+        "", res.getString(3));
+
+    assertFalse("More results found than expected", res.next());
+  }
+
+  public void testShowColumns() throws SQLException {
+    Statement stmt = con.createStatement();
+    assertNotNull("Statement is null", stmt);
+
+    ResultSet res = stmt.executeQuery("show columns in " + tableName);
+    res.next();
+    assertEquals("Column name 'under_col' not found",
+        "under_col", res.getString(1));
+
+    res.next();
+    assertEquals("Column name 'value' not found",
+        "value", res.getString(1).trim());
 
     assertFalse("More results found than expected", res.next());
   }
 
+
   public void testDatabaseMetaData() throws SQLException {
     DatabaseMetaData meta = con.getMetaData();
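For illustration only (not part of the patch): a minimal JDBC client sketch, with a hypothetical URL and table name, showing what the tests above rely on. Once HiveServer2 disables hive.human.friendly.format, the 'describe' fields arrive unpadded and the client needs no trim() calls.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DescribeTableClient {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    // Hypothetical connection details; adjust host, port and credentials.
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "user", "");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("describe testtable");
    while (res.next()) {
      // Fields come back unpadded: name, type, comment.
      System.out.println(res.getString(1) + "|" + res.getString(2)
          + "|" + res.getString(3));
    }
    con.close();
  }
}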
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 939defc..9893f35 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -40,10 +40,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -113,8 +113,9 @@
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
+import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -152,7 +153,6 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -2332,8 +2332,12 @@ public int showColumns(Hive db, ShowColumnsDesc showCols)
       List<FieldSchema> cols = table.getCols();
       cols.addAll(table.getPartCols());
+      boolean humanFriendly =
+          db.getConf().getBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT);
+
       outStream.writeBytes(
-          MetaDataFormatUtils.getAllColumnsInformation(cols, false));
+          MetaDataFormatUtils.getAllColumnsInformation(cols, false, humanFriendly));
+
       ((FSDataOutputStream) outStream).close();
       outStream = null;
     } catch (IOException e) {
@@ -2911,9 +2915,13 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
       } else {
         cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
       }
-
+      boolean humanFriendly = db.getConf().getBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT);
       formatter.describeTable(outStream, colPath, tableName, tbl, part, cols,
-          descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty());
+          descTbl.isFormatted(),
+          descTbl.isExt(),
+          descTbl.isPretty(),
+          humanFriendly);
 
       LOG.info("DDLTask: written data for " + tbl.getTableName());
       ((FSDataOutputStream) outStream).close();
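Both DDLTask call sites read the flag through the standard HiveConf accessor. A standalone sketch of the same lookup, assuming the patched HiveConf above is on the classpath:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class HumanFriendlyCheck {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Defaults to true; HiveServer2 sessions flip it to false
    // (see the HiveSessionImpl change at the end of this patch).
    boolean humanFriendly = conf.getBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT);
    System.out.println("hive.human.friendly.format = " + humanFriendly);
  }
}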
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index e24f5f9..0fc0fc4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.hive.ql.metadata.formatting;
 
 import java.io.DataOutputStream;
-import java.io.OutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -146,7 +147,7 @@ public void describeTable(DataOutputStream out,
     String colPath, String tableName,
     Table tbl, Partition part, List<FieldSchema> cols,
     boolean isFormatted, boolean isExt,
-    boolean isPretty)
+    boolean isPretty, boolean humanFriendly)
         throws HiveException {
     MapBuilder builder = MapBuilder.create();
@@ -154,10 +155,11 @@ public void describeTable(DataOutputStream out,
     builder.put("columns", makeColsUnformatted(cols));
 
     if (isExt) {
-      if (part != null)
-        builder.put("partitionInfo", part.getTPartition());
-      else
-        builder.put("tableInfo", tbl.getTTable());
+      if (part != null) {
+        builder.put("partitionInfo", part.getTPartition());
+      } else {
+        builder.put("tableInfo", tbl.getTTable());
+      }
     }
 
     asJson(out, builder.build());
@@ -165,8 +167,9 @@ private List<Map<String, Object>> makeColsUnformatted(List<FieldSchema> cols) {
     ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-    for (FieldSchema col : cols)
-      res.add(makeOneColUnformatted(col));
+    for (FieldSchema col : cols) {
+      res.add(makeOneColUnformatted(col));
+    }
     return res;
   }
@@ -202,8 +205,9 @@ public void showTableStatus(DataOutputStream out,
   {
     try {
       ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-      for (Table tbl : tbls)
-        res.add(makeOneTableStatus(tbl, db, conf, part, par));
+      for (Table tbl : tbls) {
+        res.add(makeOneTableStatus(tbl, db, conf, part, par));
+      }
       return res;
     } catch(IOException e) {
       throw new HiveException(e);
@@ -246,8 +250,9 @@ public void showTableStatus(DataOutputStream out,
     builder.put("columns", makeColsUnformatted(tbl.getCols()));
 
     builder.put("partitioned", tbl.isPartitioned());
-    if (tbl.isPartitioned())
-      builder.put("partitionColumns", makeColsUnformatted(tbl.getPartCols()));
+    if (tbl.isPartitioned()) {
+      builder.put("partitionColumns", makeColsUnformatted(tbl.getPartCols()));
+    }
 
     putFileSystemsStats(builder, makeTableStatusLocations(tbl, db, par),
         conf, tbl.getPath());
@@ -380,8 +385,9 @@ public void showTablePartitons(DataOutputStream out, List<String> parts)
     try {
       ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-      for (String part : parts)
-        res.add(makeOneTablePartition(part));
+      for (String part : parts) {
+        res.add(makeOneTablePartition(part));
+      }
       return res;
     } catch (UnsupportedEncodingException e) {
       throw new HiveException(e);
@@ -401,13 +407,15 @@ public void showTablePartitons(DataOutputStream out, List<String> parts)
       String[] kv = StringUtils.split(part, "=", 2);
       if (kv != null) {
         name = kv[0];
-        if (kv.length > 1)
-          val = URLDecoder.decode(kv[1], "UTF-8");
+        if (kv.length > 1) {
+          val = URLDecoder.decode(kv[1], "UTF-8");
+        }
+      }
+      if (val != null) {
+        names.add(name + "='" + val + "'");
+      } else {
+        names.add(name);
       }
-      if (val != null)
-        names.add(name + "='" + val + "'");
-      else
-        names.add(name);
 
       res.add(MapBuilder.create()
           .put("columnName", name)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index b08fb76..111de43 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -25,7 +25,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.hive.metastore.TableType;
@@ -64,39 +63,54 @@ private static void formatColumnsHeader(StringBuilder columnInformation) {
   }
 
   public static String getAllColumnsInformation(List<FieldSchema> cols,
-      boolean printHeader) {
+      boolean printHeader, boolean humanFriendly) {
     StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     if(printHeader){
       formatColumnsHeader(columnInformation);
     }
 
-    formatAllFields(columnInformation, cols);
+    formatAllFields(columnInformation, cols, humanFriendly);
     return columnInformation.toString();
   }
 
-  public static String getAllColumnsInformation(List<FieldSchema> cols, List<FieldSchema> partCols,
-      boolean printHeader) {
+  public static String getAllColumnsInformation(List<FieldSchema> cols,
+      List<FieldSchema> partCols, boolean printHeader, boolean humanFriendly) {
    StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     if(printHeader){
       formatColumnsHeader(columnInformation);
     }
 
-    formatAllFields(columnInformation, cols);
+    formatAllFields(columnInformation, cols, humanFriendly);
 
     if ((partCols != null) && (!partCols.isEmpty())) {
       columnInformation.append(LINE_DELIM).append("# Partition Information")
           .append(LINE_DELIM);
       formatColumnsHeader(columnInformation);
-      formatAllFields(columnInformation, partCols);
+      formatAllFields(columnInformation, partCols, humanFriendly);
     }
 
     return columnInformation.toString();
   }
 
-  private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols) {
+  private static void formatAllFields(StringBuilder tableInfo,
+      List<FieldSchema> cols, boolean humanFriendly) {
     for (FieldSchema col : cols) {
-      formatOutput(col.getName(), col.getType(), getComment(col), tableInfo);
+      if (humanFriendly) {
+        formatWithIndentation(col.getName(), col.getType(), getComment(col), tableInfo);
+      } else {
+        formatWithoutIndentation(col.getName(), col.getType(), col.getComment(), tableInfo);
+      }
     }
   }
 
+  private static void formatWithoutIndentation(String name, String type, String comment,
+      StringBuilder colBuffer) {
+    colBuffer.append(name);
+    colBuffer.append(FIELD_DELIM);
+    colBuffer.append(type);
+    colBuffer.append(FIELD_DELIM);
+    colBuffer.append(comment == null ? "" : comment);
+    colBuffer.append(LINE_DELIM);
+  }
+
   public static String getAllColumnsInformation(Index index) {
     StringBuilder indexInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
@@ -297,7 +311,7 @@ private static void formatOutput(String name, String value,
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", value)).append(LINE_DELIM);
   }
 
-  private static void formatOutput(String colName, String colType, String colComment,
+  private static void formatWithIndentation(String colName, String colType, String colComment,
       StringBuilder tableInfo) {
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", colName)).append(FIELD_DELIM);
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", colType)).append(FIELD_DELIM);
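To make the two layouts concrete, a self-contained demo of the padded and unpadded forms. FIELD_DELIM, LINE_DELIM and ALIGNMENT here are assumed stand-ins for the private constants in MetaDataFormatUtils and may not match the real values:

public class ColumnFormatDemo {
  // Assumed stand-ins for the private MetaDataFormatUtils constants.
  private static final String FIELD_DELIM = "\t";
  private static final String LINE_DELIM = "\n";
  private static final int ALIGNMENT = 20;

  // Mirrors formatWithIndentation: pad name and type to a fixed width.
  static String withIndentation(String name, String type, String comment) {
    return String.format("%-" + ALIGNMENT + "s", name) + FIELD_DELIM
        + String.format("%-" + ALIGNMENT + "s", type) + FIELD_DELIM
        + comment + LINE_DELIM;
  }

  // Mirrors formatWithoutIndentation: bare delimiter-separated fields.
  static String withoutIndentation(String name, String type, String comment) {
    return name + FIELD_DELIM + type + FIELD_DELIM
        + (comment == null ? "" : comment) + LINE_DELIM;
  }

  public static void main(String[] args) {
    System.out.print(withIndentation("under_col", "int", "the under column"));
    System.out.print(withoutIndentation("under_col", "int", "the under column"));
  }
}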
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index 6632afe..f4774ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -90,11 +91,23 @@ public void showTables(DataOutputStream out, Set<String> tables)
 
   /**
    * Describe table.
+   * @param out
+   * @param colPath
+   * @param tableName
+   * @param tbl
+   * @param part
+   * @param cols
+   * @param isFormatted - describe with formatted keyword
+   * @param isExt
+   * @param isPretty
+   * @param humanFriendly - if true, add spacing and indentation
+   * @throws HiveException
    */
   public void describeTable(DataOutputStream out,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt, boolean isPretty)
+      boolean isFormatted, boolean isExt,
+      boolean isPretty, boolean humanFriendly)
           throws HiveException;
 
   /**

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 97857f4..c2f6443 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -67,6 +67,7 @@ public TextMetaDataFormatter(int prettyOutputNumCols) {
   /**
    * Write an error message.
    */
+  @Override
   public void error(OutputStream out, String msg, int errorCode)
       throws HiveException {
@@ -81,6 +82,7 @@ public void error(OutputStream out, String msg, int errorCode)
   /**
    * Write a log warn message.
    */
+  @Override
   public void logWarn(OutputStream out, String msg, int errorCode)
       throws HiveException {
@@ -99,6 +101,7 @@ public void logInfo(OutputStream out, String msg, int errorCode)
   /**
    * Write a console error message.
    */
+  @Override
   public void consoleError(LogHelper console, String msg, int errorCode) {
     console.printError(msg);
   }
@@ -106,6 +109,7 @@
   /**
    * Write a console error message.
    */
+  @Override
   public void consoleError(LogHelper console, String msg, String detail,
       int errorCode) {
@@ -115,6 +119,7 @@
   /**
    * Show a list of tables.
    */
+  @Override
   public void showTables(DataOutputStream out, Set<String> tables)
       throws HiveException {
@@ -131,10 +136,12 @@ public void showTables(DataOutputStream out, Set<String> tables)
     }
   }
 
+  @Override
   public void describeTable(DataOutputStream outStream,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt, boolean isPretty)
+      boolean isFormatted, boolean isExt,
+      boolean isPretty, boolean humanFriendly)
       throws HiveException {
     try {
       if (colPath.equals(tableName)) {
@@ -144,11 +151,12 @@ public void describeTable(DataOutputStream outStream,
           MetaDataPrettyFormatUtils.getAllColumnsInformation(
               cols, partCols, prettyOutputNumCols) :
-          MetaDataFormatUtils.getAllColumnsInformation(cols, partCols, isFormatted)
+          MetaDataFormatUtils.getAllColumnsInformation(cols, partCols,
+              isFormatted, humanFriendly)
           );
       } else {
         outStream.writeBytes(
-            MetaDataFormatUtils.getAllColumnsInformation(cols, isFormatted));
+            MetaDataFormatUtils.getAllColumnsInformation(cols, isFormatted, humanFriendly));
       }
 
       if (tableName.equals(colPath)) {
@@ -187,6 +195,7 @@
     }
   }
 
+  @Override
   public void showTableStatus(DataOutputStream outStream,
       Hive db,
       HiveConf conf,
@@ -406,6 +415,7 @@ private void writeFileSystemStats(DataOutputStream outStream,
   /**
    * Show the table partitions.
   */
+  @Override
  public void showTablePartitons(DataOutputStream outStream, List<String> parts)
      throws HiveException {
@@ -430,6 +440,7 @@
   /**
    * Show the list of databases
    */
+  @Override
  public void showDatabases(DataOutputStream outStream, List<String> databases)
      throws HiveException {
@@ -447,6 +458,7 @@
   /**
    * Describe a database
    */
+  @Override
  public void showDatabaseDescription(DataOutputStream outStream,
      String database,
      String comment,

diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 7254491..a1ae552 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -18,7 +18,6 @@
 package org.apache.hive.service.cli.session;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -26,7 +25,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -82,6 +80,8 @@ public HiveSessionImpl(String username, String password, Map<String, String> sessionConf
     this.username = username;
     this.password = password;
 
+    setHiveServer2Configs();
+
     if (sessionConf != null) {
       for (Map.Entry<String, String> entry : sessionConf.entrySet()) {
         hiveConf.set(entry.getKey(), entry.getValue());
@@ -93,6 +93,18 @@ public HiveSessionImpl(String username, String password, Map<String, String> sessionConf
     sessionState = new SessionState(hiveConf);
   }
 
+  /**
+   * Set configurations recommended for HiveServer2.
+   */
+  private void setHiveServer2Configs() {
+    // As the results are meant to be consumed by Java code, turn off
+    // human-friendly format so that no additional indentation or
+    // space padding is done.
+    hiveConf.setBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT, false);
+  }
+
+
   private SessionManager getSessionManager() {
     return sessionManager;
   }
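Because setHiveServer2Configs() runs before the client-supplied sessionConf is applied, and DDLTask reads the flag from the session configuration, a client can presumably opt back into padded output per session. A hedged sketch (connection details hypothetical; assumes a runtime 'set' command reaches the conf that DDLTask consults):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ReenablePadding {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "user", "");
    Statement stmt = con.createStatement();
    // Per-session override: subsequent 'describe' output is padded again.
    stmt.execute("set hive.human.friendly.format=true");
    stmt.close();
    con.close();
  }
}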