Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (revision 999101) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (revision ) @@ -23,9 +23,11 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.plan.DescTableDesc; +import java.util.ArrayList; +import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; /** * This class provides methods to format table information. @@ -59,6 +57,7 @@ } private static void formatColumnsHeader(StringBuilder columnInformation) { + columnInformation.append("# "); // Easy for shell scripts to ignore formatOutput(getColumnsHeader(), columnInformation); columnInformation.append(LINE_DELIM); } @@ -76,17 +75,33 @@ } } + /* + Displaying columns unformatted for backward compatibility. + */ + public static String displayColsUnformatted(List cols) { + StringBuilder colBuffer = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); + for (FieldSchema col : cols) { + colBuffer.append(col.getName()); + colBuffer.append(FIELD_DELIM); + colBuffer.append(col.getType()); + colBuffer.append(FIELD_DELIM); + colBuffer.append(col.getComment() == null ? "" : col.getComment()); + colBuffer.append(LINE_DELIM); + } + return colBuffer.toString(); + } + public static String getPartitionInformation(Partition part) { StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); // Table Metadata - tableInfo.append("# Detailed Partition Information").append(LINE_DELIM); + tableInfo.append(LINE_DELIM).append("# Detailed Partition Information").append(LINE_DELIM); getPartitionMetaDataInformation(tableInfo, part); // Storage information. 
tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM); getStorageDescriptorInfo(tableInfo, part.getTPartition().getSd()); - + tableInfo.append(LINE_DELIM); return tableInfo.toString(); } @@ -94,7 +109,7 @@ StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); // Table Metadata - tableInfo.append("# Detailed Table Information").append(LINE_DELIM); + tableInfo.append(LINE_DELIM).append("# Detailed Table Information").append(LINE_DELIM); getTableMetaDataInformation(tableInfo, table); // Storage information. @@ -105,7 +120,7 @@ tableInfo.append(LINE_DELIM).append("# View Information").append(LINE_DELIM); getViewInfo(tableInfo, table); } - + tableInfo.append(LINE_DELIM); return tableInfo.toString(); } @@ -168,10 +183,11 @@ } private static void displayAllParameters(Map params, StringBuilder tableInfo) { - for (Map.Entry parameter: params.entrySet()) { + List keys = new ArrayList(params.keySet()); + Collections.sort(keys); + for (String key : keys) { tableInfo.append(FIELD_DELIM); // Ensures all params are indented. 
- formatOutput(parameter.getKey(), StringEscapeUtils.escapeJava(parameter.getValue()), - tableInfo); + formatOutput(key, StringEscapeUtils.escapeJava(params.get(key)), tableInfo); } } @@ -181,9 +197,12 @@ } private static String formatDate(long timeInSeconds) { + if (timeInSeconds != 0) { - Date date = new Date(timeInSeconds * 1000); - return date.toString(); - } + Date date = new Date(timeInSeconds * 1000); + return date.toString(); + } + return "UNKNOWN"; + } private static void formatOutput(String[] fields, StringBuilder tableInfo) { for (String field : fields) { Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1000539) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision ) @@ -1612,27 +1612,53 @@ FileSystem fs = resFile.getFileSystem(conf); DataOutput outStream = fs.create(resFile); + if (colPath.equals(tableName)) { + List cols = tbl.getCols(); + cols.addAll(tbl.getPartCols()); + if (!descTbl.isFormatted()) { + outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols)); + } else { - outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl)); + outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl)); + } } else { - List cols = null; + List cols = null; cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer()); + if (descTbl.isFormatted()) { - outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols)); + outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols)); + } else { + outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols)); - } + } + } if (tableName.equals(colPath)) { + + if (descTbl.isFormatted()) { + if (part != null) { + outStream.writeBytes(MetaDataFormatUtils.getPartitionInformation(part)); + } else { + 
outStream.writeBytes(MetaDataFormatUtils.getTableInformation(tbl)); + } + } + // if extended desc table then show the complete details of the table if (descTbl.isExt()) { // add empty line outStream.write(terminator); if (part != null) { // show partition information - outStream.writeBytes(MetaDataFormatUtils.getPartitionInformation(part)); + outStream.writeBytes("Detailed Partition Information"); + outStream.write(separator); + outStream.writeBytes(part.getTPartition().toString()); + outStream.write(separator); // comment column is empty outStream.write(terminator); } else { // show table information - outStream.writeBytes(MetaDataFormatUtils.getTableInformation(tbl)); + outStream.writeBytes("Detailed Table Information"); + outStream.write(separator); + outStream.writeBytes(tbl.getTTable().toString()); + outStream.write(separator); outStream.write(terminator); } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 999101) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision ) @@ -787,9 +787,12 @@ partSpec = getPartSpec(partspec); } - boolean isExt = ast.getChildCount() > 1; - DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, - partSpec, isExt); + DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec); + if (ast.getChildCount() == 2) { + int descOptions = ast.getChild(1).getType(); + descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED); + descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED); + } rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descTblDesc), conf)); setFetchTask(createFetchTask(DescTableDesc.getSchema())); Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g =================================================================== --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1001825) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision ) @@ -620,7 +620,7 @@ descStatement @init { msgs.push("describe statement"); } @after { msgs.pop(); } - : (KW_DESCRIBE|KW_DESC) (isExtended=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $isExtended?) + : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?) | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?) ; @@ -1813,6 +1813,7 @@ KW_FUNCTION: 'FUNCTION'; KW_EXPLAIN: 'EXPLAIN'; KW_EXTENDED: 'EXTENDED'; +KW_FORMATTED: 'FORMATTED'; KW_SERDE: 'SERDE'; KW_WITH: 'WITH'; KW_DEFERRED: 'DEFERRED'; Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (revision 999101) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (revision ) @@ -39,6 +39,7 @@ HashMap partSpec; String resFile; boolean isExt; + boolean isFormatted; /** * table name for the result of describe table. */ @@ -52,14 +53,14 @@ } /** - * @param isExt * @param partSpec * @param resFile * @param tableName */ public DescTableDesc(Path resFile, String tableName, - HashMap partSpec, boolean isExt) { - this.isExt = isExt; + HashMap partSpec) { + this.isExt = false; + this.isFormatted = false; this.partSpec = partSpec; this.resFile = resFile.toString(); this.tableName = tableName; } @@ -88,7 +89,22 @@ this.isExt = isExt; } - /** + /** + * @return the isFormatted + */ + public boolean isFormatted() { + return isFormatted; + } + + /** + * @param isFormatted + * the isFormatted value to set + */ + public void setFormatted(boolean isFormatted) { + this.isFormatted = isFormatted; + } + + /** * @return the tableName */ @Explain(displayName = "table")