Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (revision 11513)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (working copy)
@@ -48,18 +48,18 @@
   private MetaDataFormatUtils() {
   }
 
-  public static String getAllColumnsInformation(Table table) {
+  public static String getAllColumnsInformation(Table table, boolean noComments) {
     StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     formatColumnsHeader(columnInformation);
-    formatAllFields(columnInformation, table.getCols());
+    formatAllFields(columnInformation, table.getCols(), noComments);
 
     // Partitions
     if (table.isPartitioned()) {
       columnInformation.append(LINE_DELIM).append("# Partition Information")
           .append(LINE_DELIM);
       formatColumnsHeader(columnInformation);
-      formatAllFields(columnInformation, table.getPartCols());
+      formatAllFields(columnInformation, table.getPartCols(), noComments);
     }
     return columnInformation.toString();
   }
@@ -70,16 +70,17 @@
     columnInformation.append(LINE_DELIM);
   }
 
-  public static String getAllColumnsInformation(List<FieldSchema> cols) {
+  public static String getAllColumnsInformation(List<FieldSchema> cols, boolean noComment) {
     StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     formatColumnsHeader(columnInformation);
-    formatAllFields(columnInformation, cols);
+    formatAllFields(columnInformation, cols, noComment);
     return columnInformation.toString();
   }
 
-  private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols) {
+  private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols,
+      boolean noComment) {
     for (FieldSchema col : cols) {
-      formatFieldSchemas(tableInfo, col);
+      formatFieldSchemas(tableInfo, col, noComment);
     }
   }
 
@@ -125,14 +126,14 @@
   /*
     Displaying columns unformatted for backward compatibility.
   */
-  public static String displayColsUnformatted(List<FieldSchema> cols) {
+  public static String displayColsUnformatted(List<FieldSchema> cols, boolean noComment) {
     StringBuilder colBuffer = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
     for (FieldSchema col : cols) {
       colBuffer.append(col.getName());
       colBuffer.append(FIELD_DELIM);
       colBuffer.append(col.getType());
       colBuffer.append(FIELD_DELIM);
-      colBuffer.append(col.getComment() == null ? "" : col.getComment());
+      colBuffer.append(col.getComment() == null || noComment ? "" : col.getComment());
       colBuffer.append(LINE_DELIM);
     }
     return colBuffer.toString();
@@ -238,8 +239,12 @@
     }
   }
 
-  private static void formatFieldSchemas(StringBuilder tableInfo, FieldSchema col) {
-    String comment = col.getComment() != null ? col.getComment() : "None";
+  private static void formatFieldSchemas(StringBuilder tableInfo, FieldSchema col,
+      boolean noComment) {
+    String comment = "";
+    if (!noComment) {
+      comment = col.getComment() != null ? col.getComment() : "None";
col.getComment() : "None"; + } formatOutput(col.getName(), col.getType(), comment, tableInfo); } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 11513) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -37,10 +37,10 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.Map.Entry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -93,7 +93,6 @@ import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; @@ -126,9 +125,9 @@ import org.apache.hadoop.hive.ql.plan.ShowTablesDesc; import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.Privilege; -import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; @@ -139,12 +138,11 @@ import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.ShimLoader; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; /** * DDLTask implementation. - * + * **/ public class DDLTask extends Task implements Serializable { private static final long serialVersionUID = 1L; @@ -567,7 +565,7 @@ String tableName = null; Table tableObj = null; Database dbObj = null; - + try { if (privSubjectDesc != null) { @@ -670,7 +668,7 @@ } } } - + for (PrincipalDesc principal : principals) { for (int i = 0; i < privBag.getPrivileges().size(); i++) { HiveObjectPrivilege objPrivs = privBag.getPrivileges().get(i); @@ -807,7 +805,7 @@ /** * Add a partition to a table. - * + * * @param db * Database to add the partition to. 
   * @param addPartitionDesc
@@ -2246,16 +2244,16 @@
         if (tableName.equals(colPath)) {
           cols.addAll(tbl.getPartCols());
         }
-        outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols));
+        outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols, descTbl.isNoComment()));
       } else {
-        outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
+        outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl, descTbl.isNoComment()));
       }
     } else {
       List<FieldSchema> cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
       if (descTbl.isFormatted()) {
-        outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
+        outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols, descTbl.isNoComment()));
       } else {
-        outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols));
+        outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols, descTbl.isNoComment()));
       }
     }
 
@@ -2314,9 +2312,9 @@
       PrivilegeGrantInfo grantInfo) throws IOException {
     String privilege = grantInfo.getPrivilege();
-    int createTime = grantInfo.getCreateTime(); 
+    int createTime = grantInfo.getCreateTime();
     String grantor = grantInfo.getGrantor();
-    
+
     if (dbName != null) {
       writeKeyValuePair(outStream, "database", dbName);
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (revision 11513)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (working copy)
@@ -40,6 +40,9 @@
   String resFile;
   boolean isExt;
   boolean isFormatted;
+  // Whether to hide comments
+  boolean isNoComment;
+
   /**
    * table name for the result of describe table.
    */
@@ -61,6 +64,7 @@
       HashMap<String, String> partSpec) {
     this.isExt = false;
     this.isFormatted = false;
+    this.isNoComment = false;
     this.partSpec = partSpec;
     this.resFile = resFile.toString();
     this.tableName = tableName;
@@ -105,6 +109,19 @@
   }
 
   /**
+   * @return the no comment
+   */
+  public boolean isNoComment() {
+    return this.isNoComment;
+  }
+
+  /**
+   *
+   */
+  public void setNoComment(boolean isNoComment) {
+    this.isNoComment = isNoComment;
+  }
+  /**
    * @return the tableName
    */
   @Explain(displayName = "table")
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 11513)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -720,7 +720,7 @@
 descStatement
 @init { msgs.push("describe statement"); }
 @after { msgs.pop(); }
-    : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
+    : (KW_DESCRIBE|KW_DESC) (descOptions1=KW_FORMATTED|descOptions1=KW_EXTENDED)? (descOptions2=KW_NOCOMMENT)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions1? $descOptions2?)
     | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
     | (KW_DESCRIBE|KW_DESC) KW_DATABASE KW_EXTENDED? (dbName=Identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
    ;
@@ -2072,6 +2072,7 @@
 KW_EXPLAIN: 'EXPLAIN';
 KW_EXTENDED: 'EXTENDED';
 KW_FORMATTED: 'FORMATTED';
+KW_NOCOMMENT: 'NOCOMMENT';
 KW_SERDE: 'SERDE';
 KW_WITH: 'WITH';
 KW_DEFERRED: 'DEFERRED';
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 11513)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -30,9 +30,9 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
+import java.util.Map.Entry;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
@@ -53,8 +53,8 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
+import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
-import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -64,19 +64,10 @@
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
-import org.apache.hadoop.hive.ql.plan.GrantDesc;
-import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
-import org.apache.hadoop.hive.ql.plan.RevokeDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
@@ -85,8 +76,15 @@
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.GrantDesc;
+import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.plan.RevokeDesc;
+import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
@@ -98,6 +96,8 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -357,7 +357,7 @@
 
   private void analyzeShowGrant(ASTNode ast) throws SemanticException {
     PrivilegeObjectDesc privHiveObj = null;
-    
+
     ASTNode principal = (ASTNode) ast.getChild(0);
     PrincipalType type = PrincipalType.USER;
     switch (principal.getType()) {
@@ -384,22 +384,22 @@
           ASTNode grandChild = (ASTNode) child.getChild(i);
           if (grandChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
             privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild));
-          } else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
+          } else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
             cols = getColumnNames((ASTNode) grandChild);
           } else {
-            privHiveObj.setTable(child.getChild(i) != null);
+            privHiveObj.setTable(child.getChild(i) != null);
           }
         }
       }
     }
-    
+
     if (privHiveObj == null && cols != null) {
       throw new SemanticException(
           "For user-level privileges, column sets should be null. columns=" + cols.toString());
     }
-    
+
     ShowGrantDesc showGrant = new ShowGrantDesc(ctx.getResFile().toString(),
         principalDesc, privHiveObj, cols);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -421,10 +421,10 @@
         grantOption = true;
       } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
         privilegeObj = analyzePrivilegeObject(astChild);
-      }
+      }
     }
   }
-  
+
   String userName = null;
   if (SessionState.get() != null
       && SessionState.get().getAuthenticator() != null) {
@@ -436,7 +436,7 @@
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         grantDesc), conf));
   }
-  
+
   private void analyzeRevoke(ASTNode ast) throws SemanticException {
     List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef(
         (ASTNode) ast.getChild(0));
@@ -447,13 +447,13 @@
       ASTNode astChild = (ASTNode) ast.getChild(2);
       hiveObj = analyzePrivilegeObject(astChild);
     }
-    
+
     RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         revokeDesc), conf));
   }
-  
+
   private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast)
       throws SemanticException {
     PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
@@ -470,10 +470,10 @@
     }
     return subject;
   }
-  
+
   private List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
     List<PrincipalDesc> principalList = new ArrayList<PrincipalDesc>();
-    
+
     for (int i = 0; i < node.getChildCount(); i++) {
       ASTNode child = (ASTNode) node.getChild(i);
       PrincipalType type = null;
@@ -492,7 +492,7 @@
       PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
       principalList.add(principalDesc);
     }
-    
+
     return principalList;
   }
 
@@ -501,7 +501,7 @@
     List<PrivilegeDesc> ret = new ArrayList<PrivilegeDesc>();
     for (int i = 0; i < node.getChildCount(); i++) {
       ASTNode privilegeDef = (ASTNode) node.getChild(i);
-      
+
       String privilegeStr = unescapeIdentifier(privilegeDef.getChild(0)
           .getText());
       Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeStr);
@@ -1170,10 +1170,17 @@
     }
 
     DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec);
-    if (ast.getChildCount() == 2) {
-      int descOptions = ast.getChild(1).getType();
-      descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED);
-      descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED);
+    if (ast.getChildCount() >= 2) {
+      for (int i=0; i