diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 83f337b..d8bf199 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -722,6 +722,12 @@
     HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
     HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
 
+    // Whether to pad and indent output with spaces for human readability.
+    // Controls 'describe table' and 'show columns' output, but formatting
+    // keywords in the query can override this.
+    // Automatically set by HiveServer2.
+    HIVE_HUMAN_FRIENDLY_FORMAT("hive.human.friendly.format", true),
+
     HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
     HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
 
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
index f35a351..b4ec5a3 100644
--- a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -915,17 +915,39 @@ public void testDescribeTable() throws SQLException {
     ResultSet res = stmt.executeQuery("describe " + tableName);
 
     res.next();
-    assertEquals("Column name 'under_col' not found", "under_col", res.getString(1).trim());
-    assertEquals("Column type 'under_col' for column under_col not found", "int", res
-        .getString(2).trim());
+    assertEquals("Column name 'under_col' not found", "under_col", res.getString(1));
+    assertEquals("Column type 'under_col' for column under_col not found",
+        "int", res.getString(2));
+    assertEquals("Column comment for column under_col not found",
+        "the under column", res.getString(3));
+
     res.next();
     assertEquals("Column name 'value' not found", "value", res.getString(1).trim());
     assertEquals("Column type 'string' for column key not found", "string", res
-        .getString(2).trim());
+        .getString(2));
+    assertEquals("Column comment for key not found",
+        "", res.getString(3));
+
+    assertFalse("More results found than expected", res.next());
+  }
+
+  public void testShowColumns() throws SQLException {
+    Statement stmt = con.createStatement();
+    assertNotNull("Statement is null", stmt);
+
+    ResultSet res = stmt.executeQuery("show columns in " + tableName);
+    res.next();
+    assertEquals("Column name 'under_col' not found",
+        "under_col", res.getString(1));
+
+    res.next();
+    assertEquals("Column name 'value' not found",
+        "value", res.getString(1).trim());
 
     assertFalse("More results found than expected", res.next());
   }
 
+
   public void testDatabaseMetaData() throws SQLException {
     DatabaseMetaData meta = con.getMetaData();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 4dcb260..ef9bbdc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -40,10 +40,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -112,8 +112,9 @@
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
+import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -151,7 +152,6 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -2281,8 +2281,12 @@ public int showColumns(Hive db, ShowColumnsDesc showCols)
       List<FieldSchema> cols = table.getCols();
       cols.addAll(table.getPartCols());
 
+      boolean humanFriendly =
+          db.getConf().getBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT);
+
       outStream.writeBytes(
-          MetaDataFormatUtils.getAllColumnsInformation(cols, false));
+          MetaDataFormatUtils.getAllColumnsInformation(cols, false, humanFriendly));
+
       ((FSDataOutputStream) outStream).close();
       outStream = null;
     } catch (IOException e) {
@@ -2828,9 +2832,13 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
       } else {
         cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
       }
-
+      boolean humanFriendly = db.getConf().getBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT);
       formatter.describeTable(outStream, colPath, tableName, tbl, part, cols,
-          descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty());
+          descTbl.isFormatted(),
+          descTbl.isExt(),
+          descTbl.isPretty(),
+          humanFriendly);
 
       LOG.info("DDLTask: written data for " + tbl.getTableName());
       outStream.close();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index a85a19d..f31c1fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -19,14 +19,15 @@
 package org.apache.hadoop.hive.ql.metadata.formatting;
 
 import java.io.DataOutputStream;
-import java.io.OutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -106,7 +107,7 @@
   public void describeTable(DataOutputStream out,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
       boolean isFormatted, boolean isExt,
-      boolean isPretty)
+      boolean isPretty, boolean humanFriendly)
       throws HiveException {
     MapBuilder builder = MapBuilder.create();
@@ -114,10 +115,11 @@ public void describeTable(DataOutputStream out,
     builder.put("columns", makeColsUnformatted(cols));
 
     if (isExt) {
-      if (part != null)
-        builder.put("partitionInfo", part.getTPartition());
-      else
-        builder.put("tableInfo", tbl.getTTable());
+      if (part != null) {
+        builder.put("partitionInfo", part.getTPartition());
+      } else {
+        builder.put("tableInfo", tbl.getTTable());
+      }
     }
 
     asJson(out, builder.build());
@@ -125,8 +127,9 @@ public void describeTable(DataOutputStream out,
 
   private List<Map<String, Object>> makeColsUnformatted(List<FieldSchema> cols) {
     ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-    for (FieldSchema col : cols)
-      res.add(makeOneColUnformatted(col));
+    for (FieldSchema col : cols) {
+      res.add(makeOneColUnformatted(col));
+    }
     return res;
   }
 
@@ -163,8 +166,9 @@ public void showTableStatus(DataOutputStream out,
   {
     try {
       ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-      for (Table tbl : tbls)
-        res.add(makeOneTableStatus(tbl, db, conf, part, par));
+      for (Table tbl : tbls) {
+        res.add(makeOneTableStatus(tbl, db, conf, part, par));
+      }
       return res;
     } catch(IOException e) {
       throw new HiveException(e);
@@ -207,8 +211,9 @@ public void showTableStatus(DataOutputStream out,
     builder.put("columns", makeColsUnformatted(tbl.getCols()));
 
     builder.put("partitioned", tbl.isPartitioned());
-    if (tbl.isPartitioned())
-      builder.put("partitionColumns", makeColsUnformatted(tbl.getPartCols()));
+    if (tbl.isPartitioned()) {
+      builder.put("partitionColumns", makeColsUnformatted(tbl.getPartCols()));
+    }
 
     putFileSystemsStats(builder, makeTableStatusLocations(tbl, db, par),
         conf, tbl.getPath());
@@ -342,8 +347,9 @@ public void showTablePartitons(DataOutputStream out, List<String> parts)
   {
     try {
       ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-      for (String part : parts)
-        res.add(makeOneTablePartition(part));
+      for (String part : parts) {
+        res.add(makeOneTablePartition(part));
+      }
       return res;
     } catch (UnsupportedEncodingException e) {
       throw new HiveException(e);
@@ -363,13 +369,15 @@ public void showTablePartitons(DataOutputStream out, List<String> parts)
       String[] kv = StringUtils.split(part, "=", 2);
       if (kv != null) {
         name = kv[0];
-        if (kv.length > 1)
-          val = URLDecoder.decode(kv[1], "UTF-8");
+        if (kv.length > 1) {
+          val = URLDecoder.decode(kv[1], "UTF-8");
+        }
+      }
+      if (val != null) {
+        names.add(name + "='" + val + "'");
+      } else {
+        names.add(name);
       }
-      if (val != null)
-        names.add(name + "='" + val + "'");
-      else
-        names.add(name);
 
       res.add(MapBuilder.create()
           .put("columnName", name)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index 0d71891..32eac7c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -25,7 +25,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -64,40 +63,80 @@ private static void formatColumnsHeader(StringBuilder columnInformation) {
     columnInformation.append(LINE_DELIM);
   }
 
+  /**
+   * Write formatted information about the given columns to a string.
+   * @param cols - list of columns
+   * @param printHeader - whether the header should be included
+   * @param humanFriendly - make the output more human readable by indenting
+   *          with spaces; turned off for use by HiveServer2
+   * @return string with formatted column information
+   */
   public static String getAllColumnsInformation(List<FieldSchema> cols,
-      boolean printHeader) {
+      boolean printHeader, boolean humanFriendly) {
     StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     if(printHeader){
       formatColumnsHeader(columnInformation);
     }
 
-    formatAllFields(columnInformation, cols);
+    formatAllFields(columnInformation, cols, humanFriendly);
     return columnInformation.toString();
   }
 
-  public static String getAllColumnsInformation(List<FieldSchema> cols, List<FieldSchema> partCols,
-      boolean printHeader) {
+  /**
+   * Write formatted information about the given columns, including partition
+   * columns, to a string.
+   * @param cols - list of columns
+   * @param partCols - list of partition columns
+   * @param printHeader - whether the header should be included
+   * @param humanFriendly - make the output more human readable by indenting
+   *          with spaces; turned off for use by HiveServer2
+   * @return string with formatted column information
+   */
+  public static String getAllColumnsInformation(List<FieldSchema> cols,
+      List<FieldSchema> partCols, boolean printHeader, boolean humanFriendly) {
     StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     if(printHeader){
       formatColumnsHeader(columnInformation);
     }
 
-    formatAllFields(columnInformation, cols);
+    formatAllFields(columnInformation, cols, humanFriendly);
 
     if ((partCols != null) && (!partCols.isEmpty())) {
       columnInformation.append(LINE_DELIM).append("# Partition Information")
          .append(LINE_DELIM);
       formatColumnsHeader(columnInformation);
-      formatAllFields(columnInformation, partCols);
+      formatAllFields(columnInformation, partCols, humanFriendly);
     }
 
     return columnInformation.toString();
   }
 
-  private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols) {
+  /**
+   * Write formatted column information into the given StringBuilder.
+   * @param tableInfo - StringBuilder to append column information to
+   * @param cols - list of columns
+   * @param humanFriendly - make the output more human readable by indenting
+   *          with spaces; turned off for use by HiveServer2
+   */
+  private static void formatAllFields(StringBuilder tableInfo,
+      List<FieldSchema> cols, boolean humanFriendly) {
     for (FieldSchema col : cols) {
-      formatOutput(col.getName(), col.getType(), getComment(col), tableInfo);
+      if (humanFriendly) {
+        formatWithIndentation(col.getName(), col.getType(), getComment(col), tableInfo);
+      } else {
+        formatWithoutIndentation(col.getName(), col.getType(), col.getComment(), tableInfo);
+      }
     }
   }
 
+  private static void formatWithoutIndentation(String name, String type, String comment,
+      StringBuilder colBuffer) {
+    colBuffer.append(name);
+    colBuffer.append(FIELD_DELIM);
+    colBuffer.append(type);
+    colBuffer.append(FIELD_DELIM);
+    colBuffer.append(comment == null ? "" : comment);
+    colBuffer.append(LINE_DELIM);
+  }
+
   public static String getAllColumnsInformation(Index index) {
     StringBuilder indexInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
@@ -298,7 +337,7 @@ private static void formatOutput(String name, String value,
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", value)).append(LINE_DELIM);
   }
 
-  private static void formatOutput(String colName, String colType, String colComment,
+  private static void formatWithIndentation(String colName, String colType, String colComment,
       StringBuilder tableInfo) {
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", colName)).append(FIELD_DELIM);
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", colType)).append(FIELD_DELIM);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index 4c40034..1e4bdeb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -59,11 +60,23 @@ public void showTables(DataOutputStream out, Set<String> tables)
 
   /**
    * Describe table.
+   * @param out
+   * @param colPath
+   * @param tableName
+   * @param tbl
+   * @param part
+   * @param cols
+   * @param isFormatted - describe with the formatted keyword
+   * @param isExt
+   * @param isPretty
+   * @param humanFriendly - if true, add spacing and indentation
+   * @throws HiveException
    */
   public void describeTable(DataOutputStream out,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt, boolean isPretty)
+      boolean isFormatted, boolean isExt,
+      boolean isPretty, boolean humanFriendly)
       throws HiveException;
 
   /**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 0f48674..d4b1019 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -116,7 +116,8 @@ public void showTables(DataOutputStream out, Set<String> tables)
   public void describeTable(DataOutputStream outStream,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt, boolean isPretty)
+      boolean isFormatted, boolean isExt,
+      boolean isPretty, boolean humanFriendly)
       throws HiveException {
     try {
       String output;
@@ -126,9 +127,11 @@ public void describeTable(DataOutputStream outStream,
         output = isPretty ?
             MetaDataPrettyFormatUtils.getAllColumnsInformation(
                 cols, partCols, prettyOutputNumCols) :
-            MetaDataFormatUtils.getAllColumnsInformation(cols, partCols, isFormatted);
+            MetaDataFormatUtils.getAllColumnsInformation(cols, partCols,
+                isFormatted, humanFriendly);
       } else {
-        output = MetaDataFormatUtils.getAllColumnsInformation(cols, isFormatted);
+        output = MetaDataFormatUtils.getAllColumnsInformation(cols,
+            isFormatted, humanFriendly);
       }
 
       outStream.write(output.getBytes());
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 7254491..005be0d 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -18,7 +18,6 @@
 package org.apache.hive.service.cli.session;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -26,7 +25,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -82,17 +80,32 @@ public HiveSessionImpl(String username, String password, Map<String, String> ses
     this.username = username;
     this.password = password;
 
+    // set HiveServer2 default configs
+    setHiveServer2Configs();
+
+    // set conf properties specified by the user from the client side
     if (sessionConf != null) {
      for (Map.Entry<String, String> entry : sessionConf.entrySet()) {
        hiveConf.set(entry.getKey(), entry.getValue());
      }
    }
+
    // set an explicit session name to control the download directory name
    hiveConf.set(ConfVars.HIVESESSIONID.varname,
        sessionHandle.getHandleIdentifier().toString());
    sessionState = new SessionState(hiveConf);
  }

+  /**
+   * Set configurations recommended for HiveServer2.
+   */
+  private void setHiveServer2Configs() {
+    // As the results are meant to be consumed by Java code, turn off the
+    // human-friendly format so that no additional indentation or space
+    // padding is done.
+    hiveConf.setBoolVar(ConfVars.HIVE_HUMAN_FRIENDLY_FORMAT, false);
+  }
+
   private SessionManager getSessionManager() {
     return sessionManager;
   }
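
Note on the behavior change (not part of the patch itself): the new humanFriendly flag only switches between the space-padded output of formatWithIndentation and the bare tab-separated output of formatWithoutIndentation, which is why the JDBC tests above can drop their .trim() calls. Below is a minimal standalone sketch of the two paths. The class name and the constant values (FIELD_DELIM as a tab, LINE_DELIM as a newline, ALIGNMENT as a pad width of 20) are illustrative assumptions mirroring MetaDataFormatUtils, not copied from the patch.

public class ColumnFormatSketch {
  private static final String FIELD_DELIM = "\t";
  private static final String LINE_DELIM = "\n";
  private static final int ALIGNMENT = 20; // assumed pad width

  // Human-friendly path: pad each field to a fixed width, roughly as
  // formatWithIndentation does via String.format("%-20s", ...).
  static String friendly(String name, String type, String comment) {
    return String.format("%-" + ALIGNMENT + "s", name) + FIELD_DELIM
        + String.format("%-" + ALIGNMENT + "s", type) + FIELD_DELIM
        + String.format("%-" + ALIGNMENT + "s", comment) + LINE_DELIM;
  }

  // Machine-friendly path: bare tab-separated fields, as
  // formatWithoutIndentation does; a null comment becomes "".
  static String plain(String name, String type, String comment) {
    return name + FIELD_DELIM + type + FIELD_DELIM
        + (comment == null ? "" : comment) + LINE_DELIM;
  }

  public static void main(String[] args) {
    // friendly(): "under_col           \tint                 \t..."
    // plain():    "under_col\tint\tthe under column"
    System.out.print(friendly("under_col", "int", "the under column"));
    System.out.print(plain("under_col", "int", "the under column"));
  }
}

Because setHiveServer2Configs() runs before the user-supplied session conf is applied in HiveSessionImpl, a client that still wants the padded output should be able to override the default by passing hive.human.friendly.format=true in its session configuration.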