Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java	(revision 713815)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java	(working copy)
@@ -261,7 +261,4 @@
     throw new RuntimeException("cannot find field " + fieldName + " from " + fields);
     // return null;
   }
-
-
-
 }
Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java	(revision 713815)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java	(working copy)
@@ -48,6 +48,8 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.util.StringUtils;
 
@@ -553,19 +555,31 @@
     String [] names = tableName.split("\\.");
     String last_name = names[names.length-1];
     for(int i = 1; i < names.length; i++) {
-      if (!(oi instanceof StructObjectInspector)) {
-        oi = deserializer.getObjectInspector();
-        break;
+
+      if (oi instanceof StructObjectInspector) {
+        StructObjectInspector soi = (StructObjectInspector)oi;
+        StructField sf = soi.getStructFieldRef(names[i]);
+        if (sf == null) {
+          throw new MetaException("Invalid Field " + names[i]);
+        } else {
+          oi = sf.getFieldObjectInspector();
+        }
       }
-      StructObjectInspector soi = (StructObjectInspector)oi;
-      StructField sf = soi.getStructFieldRef(names[i]);
-      if (sf == null) {
-        // If invalid field, then return the schema of the table
-        oi = deserializer.getObjectInspector();
-        break;
-      } else {
-        oi = sf.getFieldObjectInspector();
+      else if (oi instanceof ListObjectInspector && names[i].equalsIgnoreCase("$elem$")) {
+        ListObjectInspector loi = (ListObjectInspector)oi;
+        oi = loi.getListElementObjectInspector();
       }
+      else if (oi instanceof MapObjectInspector && names[i].equalsIgnoreCase("$key$")) {
+        MapObjectInspector moi = (MapObjectInspector)oi;
+        oi = moi.getMapKeyObjectInspector();
+      }
+      else if (oi instanceof MapObjectInspector && names[i].equalsIgnoreCase("$value$")) {
+        MapObjectInspector moi = (MapObjectInspector)oi;
+        oi = moi.getMapValueObjectInspector();
+      }
+      else {
+        throw new MetaException("Unknown type for " + names[i]);
+      }
     }
 
     ArrayList<FieldSchema> str_fields = new ArrayList<FieldSchema>();
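The MetaStoreUtils hunk above is the heart of the change: getFieldsFromDeserializer() now follows each component of a dotted column path through the object-inspector tree, instead of falling back to the full table schema at the first non-struct. For reading outside the interleaved diff, here is a minimal standalone sketch of the same walk; the class and method names are illustrative, not part of the patch, and names[0] is assumed to be the table itself, so the walk starts at index 1.

    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    // Illustrative helper, not part of the patch: walks a dotted column path
    // such as "src_thrift.lintString.$elem$.myint" starting from the table's
    // top-level object inspector.
    public final class ColumnPathWalker {
      public static ObjectInspector walk(ObjectInspector oi, String[] names)
          throws MetaException {
        for (int i = 1; i < names.length; i++) {
          if (oi instanceof StructObjectInspector) {
            // Plain identifier: descend into the named struct member.
            StructField sf = ((StructObjectInspector) oi).getStructFieldRef(names[i]);
            if (sf == null) {
              throw new MetaException("Invalid Field " + names[i]);
            }
            oi = sf.getFieldObjectInspector();
          } else if (oi instanceof ListObjectInspector
              && names[i].equalsIgnoreCase("$elem$")) {
            // $elem$ selects the element type of a list.
            oi = ((ListObjectInspector) oi).getListElementObjectInspector();
          } else if (oi instanceof MapObjectInspector
              && names[i].equalsIgnoreCase("$key$")) {
            // $key$ selects the key type of a map.
            oi = ((MapObjectInspector) oi).getMapKeyObjectInspector();
          } else if (oi instanceof MapObjectInspector
              && names[i].equalsIgnoreCase("$value$")) {
            // $value$ selects the value type of a map.
            oi = ((MapObjectInspector) oi).getMapValueObjectInspector();
          } else {
            // Anything else, e.g. "abc" under a list or map, is rejected;
            // the describe_xpath3/describe_xpath4 tests below exercise this.
            throw new MetaException("Unknown type for " + names[i]);
          }
        }
        return oi;
      }
    }

For src_thrift.lintString.$elem$.myint the walk visits struct, then list, then struct, and returns the inspector for an int, which is what the positive test added below prints.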
Index: ql/src/test/results/clientnegative/describe_xpath1.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath1.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/describe_xpath1.q.out	(revision 0)
@@ -0,0 +1,2 @@
+Failed with exception cannot find field $elem$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath2.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath2.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/describe_xpath2.q.out	(revision 0)
@@ -0,0 +1,2 @@
+Failed with exception cannot find field $key$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath3.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath3.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/describe_xpath3.q.out	(revision 0)
@@ -0,0 +1,2 @@
+FAILED: Error in metadata: Error in getting fields from serde.Unknown type for abc
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath4.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath4.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/describe_xpath4.q.out	(revision 0)
@@ -0,0 +1,2 @@
+FAILED: Error in metadata: Error in getting fields from serde.Unknown type for abc
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientpositive/input1.q.out
===================================================================
--- ql/src/test/results/clientpositive/input1.q.out	(revision 713815)
+++ ql/src/test/results/clientpositive/input1.q.out	(working copy)
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_DESCTABLE (TOK_TAB TEST1))
+  (TOK_DESCTABLE (TOK_TABTYPE TEST1))
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
Index: ql/src/test/results/clientpositive/describe_xpath.q.out
===================================================================
--- ql/src/test/results/clientpositive/describe_xpath.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/describe_xpath.q.out	(revision 0)
@@ -0,0 +1,7 @@
+lint	array<int>	'from deserializer'
+$elem$	int	'from deserializer'
+$key$	string	'from deserializer'
+$value$	string	'from deserializer'
+myint	int	'from deserializer'
+mystring	string	'from deserializer'
+myint	int	'from deserializer'
Index: ql/src/test/results/clientpositive/input10.q.out
===================================================================
--- ql/src/test/results/clientpositive/input10.q.out	(revision 713815)
+++ ql/src/test/results/clientpositive/input10.q.out	(working copy)
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_DESCTABLE (TOK_TAB TEST10))
+  (TOK_DESCTABLE (TOK_TABTYPE TEST10))
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
Index: ql/src/test/results/clientpositive/inputddl6.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl6.q.out	(revision 713815)
+++ ql/src/test/results/clientpositive/inputddl6.q.out	(working copy)
@@ -2,18 +2,18 @@
 value	string
 ds	datetime
 Detailed Table Information:
-Table(tableName:inputddl6,dbName:default,owner:njain,createTime:1225994083,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{})
+Table(tableName:inputddl6,dbName:default,owner:athusoo,createTime:1226706814,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/apacheprojects/hive_local_ws2/trunk/build/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{})
 key	string
 value	string
 ds	datetime
 Detailed Partition Information:
-Partition(values:[2008-04-08],dbName:default,tableName:inputddl6,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6/ds=2008-04-08,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+Partition(values:[2008-04-08],dbName:default,tableName:inputddl6,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/apacheprojects/hive_local_ws2/trunk/build/ql/test/data/warehouse/inputddl6/ds=2008-04-08,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
 ds=2008-04-08
 ds=2008-04-09
 ds=2008-04-09
 
 ABSTRACT SYNTAX TREE:
-  (TOK_DESCTABLE (TOK_TAB INPUTDDL6 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-09'))) EXTENDED)
+  (TOK_DESCTABLE (TOK_TABTYPE INPUTDDL6 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-09'))) EXTENDED)
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
Index: ql/src/test/queries/clientnegative/describe_xpath1.q
===================================================================
--- ql/src/test/queries/clientnegative/describe_xpath1.q	(revision 0)
+++ ql/src/test/queries/clientnegative/describe_xpath1.q	(revision 0)
@@ -0,0 +1 @@
+describe src_thrift.$elem$;
Index: ql/src/test/queries/clientnegative/describe_xpath2.q
===================================================================
--- ql/src/test/queries/clientnegative/describe_xpath2.q	(revision 0)
+++ ql/src/test/queries/clientnegative/describe_xpath2.q	(revision 0)
@@ -0,0 +1 @@
+describe src_thrift.$key$;
Index: ql/src/test/queries/clientnegative/describe_xpath3.q
===================================================================
--- ql/src/test/queries/clientnegative/describe_xpath3.q	(revision 0)
+++ ql/src/test/queries/clientnegative/describe_xpath3.q	(revision 0)
@@ -0,0 +1 @@
+describe src_thrift.lint.abc;
Index: ql/src/test/queries/clientnegative/describe_xpath4.q
===================================================================
--- ql/src/test/queries/clientnegative/describe_xpath4.q	(revision 0)
+++ ql/src/test/queries/clientnegative/describe_xpath4.q	(revision 0)
@@ -0,0 +1 @@
+describe src_thrift.mStringString.abc;
Index: ql/src/test/queries/clientpositive/describe_xpath.q
===================================================================
--- ql/src/test/queries/clientpositive/describe_xpath.q	(revision 0)
+++ ql/src/test/queries/clientpositive/describe_xpath.q	(revision 0)
@@ -0,0 +1,17 @@
+-- Describe a list structure in a thrift table
+describe src_thrift.lint;
+
+-- Describe the element of a list
+describe src_thrift.lint.$elem$;
+
+-- Describe the key of a map
+describe src_thrift.mStringString.$key$;
+
+-- Describe the value of a map
+describe src_thrift.mStringString.$value$;
+
+-- Describe a complex element of a list
+describe src_thrift.lintString.$elem$;
+
+-- Describe a member of an element of a list
+describe src_thrift.lintString.$elem$.myint;
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(revision 713815)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(working copy)
@@ -662,9 +662,9 @@
     try {
       return MetaStoreUtils.getFieldsFromDeserializer(name, serde);
     } catch (SerDeException e) {
-      throw new HiveException("Error in getting fields from serde.", e);
+      throw new HiveException("Error in getting fields from serde. " + e.getMessage(), e);
     } catch (MetaException e) {
-      throw new HiveException("Error in getting fields from serde.", e);
+      throw new HiveException("Error in getting fields from serde." + e.getMessage(), e);
     }
   }
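The DDLTask changes that follow first split the DESCRIBE argument into the table name (everything before the first dot) and the full column path. A self-contained illustration of that split, with a hypothetical main() harness wrapped around the same substring expression the patch uses:

    // Hypothetical harness, not part of the patch, showing the
    // tableName/colPath split performed at the top of DDLTask.
    public class ColPathSplitDemo {
      public static void main(String[] args) {
        String colPath = "src_thrift.lintString.$elem$.myint"; // example input
        int dot = colPath.indexOf('.');
        String tableName = colPath.substring(0, dot == -1 ? colPath.length() : dot);
        System.out.println(tableName); // prints "src_thrift"
        // With no dot, e.g. "src_thrift", tableName equals colPath and DDLTask
        // keeps the old behavior: table columns plus partition columns.
      }
    }

When colPath names more than the table, DDLTask asks the deserializer for the fields at that path (db.getFieldsFromDeserializer) and skips the partition-column and extended-info output, as the hunks below show.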
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 713815)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -321,14 +321,19 @@
     }
 
     descTableDesc descTbl = work.getDescTblDesc();
+
     if (descTbl != null) {
+
+      String colPath = descTbl.getTableName();
+      String tableName = colPath.substring(0, colPath.indexOf('.') == -1 ?
+        colPath.length() : colPath.indexOf('.'));
       // describe the table - populate the output stream
-      Table tbl = db.getTable(descTbl.getTableName(), false);
+      Table tbl = db.getTable(tableName, false);
       Partition part = null;
       try {
         if(tbl == null) {
           DataOutput outStream = (DataOutput)fs.open(descTbl.getResFile());
-          String errMsg = "Table " + descTbl.getTableName() + " does not exist";
+          String errMsg = "Table " + tableName + " does not exist";
           outStream.write(errMsg.getBytes("UTF-8"));
           ((FSDataOutputStream)outStream).close();
           return 0;
@@ -337,11 +342,12 @@
         part = db.getPartition(tbl, descTbl.getPartSpec(), false);
         if(part == null) {
           DataOutput outStream = (DataOutput)fs.open(descTbl.getResFile());
-          String errMsg = "Partition " + descTbl.getPartSpec() + " for table " + descTbl.getTableName() + " does not exist";
+          String errMsg = "Partition " + descTbl.getPartSpec() + " for table " + tableName + " does not exist";
           outStream.write(errMsg.getBytes("UTF-8"));
           ((FSDataOutputStream)outStream).close();
           return 0;
         }
+        tbl = part.getTable();
       }
     } catch (FileNotFoundException e) {
       LOG.info("describe table: " + StringUtils.stringifyException(e));
@@ -358,10 +364,17 @@
 
     // write the results in the file
     DataOutput os = (DataOutput)fs.create(descTbl.getResFile());
-    List<FieldSchema> cols = tbl.getCols();
-    if(part != null) {
-      cols = part.getTPartition().getSd().getCols();
+    List<FieldSchema> cols = null;
+    if (colPath.equals(tableName)) {
+      cols = tbl.getCols();
+      if (part != null) {
+        cols = part.getTPartition().getSd().getCols();
+      }
     }
+    else {
+      cols = db.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
+    }
+
     Iterator<FieldSchema> iterCols = cols.iterator();
     boolean firstCol = true;
     while (iterCols.hasNext())
@@ -382,35 +395,37 @@
       firstCol = false;
     }
 
-    // also return the partitioning columns
-    List<FieldSchema> partCols = tbl.getPartCols();
-    Iterator<FieldSchema> iterPartCols = partCols.iterator();
-    while (iterPartCols.hasNext())
-    {
-      os.write(terminator);
-      FieldSchema col = iterPartCols.next();
-      os.write(col.getName().getBytes("UTF-8"));
-      os.write(separator);
-      os.write(col.getType().getBytes("UTF-8"));
-      if (col.getComment() != null)
+    if (tableName.equals(colPath)) {
+      // also return the partitioning columns
+      List<FieldSchema> partCols = tbl.getPartCols();
+      Iterator<FieldSchema> iterPartCols = partCols.iterator();
+      while (iterPartCols.hasNext())
       {
+        os.write(terminator);
+        FieldSchema col = iterPartCols.next();
+        os.write(col.getName().getBytes("UTF-8"));
         os.write(separator);
-        os.write(col.getComment().getBytes("UTF-8"));
+        os.write(col.getType().getBytes("UTF-8"));
+        if (col.getComment() != null)
+        {
+          os.write(separator);
+          os.write(col.getComment().getBytes("UTF-8"));
+        }
       }
-    }
 
-    // if extended desc table then show the complete details of the table
-    if(descTbl.isExt()) {
-      if(part != null) {
-        // show partition informatio
-        os.write("\n\nDetailed Partition Information:\n".getBytes("UTF-8"));
-        os.write(part.getTPartition().toString().getBytes("UTF-8"));
-      } else {
-        os.write("\nDetailed Table Information:\n".getBytes("UTF-8"));
-        os.write(tbl.getTTable().toString().getBytes("UTF-8"));
+      // if extended desc table then show the complete details of the table
+      if(descTbl.isExt()) {
+        if(part != null) {
+          // show partition information
+          os.write("\n\nDetailed Partition Information:\n".getBytes("UTF-8"));
+          os.write(part.getTPartition().toString().getBytes("UTF-8"));
+        } else {
+          os.write("\nDetailed Table Information:\n".getBytes("UTF-8"));
+          os.write(tbl.getTTable().toString().getBytes("UTF-8"));
+        }
       }
     }
-
+
     LOG.info("DDLTask: written data for " + tbl.getName());
     ((FSDataOutputStream)os).close();
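The grammar change below parses the dotted path into a left-nested DOT tree under the new TOK_TABTYPE node; for example, describe src_thrift.lint.$elem$ should produce ^(TOK_DESCTABLE ^(TOK_TABTYPE ^(DOT ^(DOT src_thrift lint) $elem$))), though that exact shape is inferred from the rewrite rules rather than stated in the patch. The DDLSemanticAnalyzer hunk at the end flattens such a tree back into a string. A standalone rendition of that flattening, assuming ANTLR 3's CommonTree API:

    import org.antlr.runtime.tree.CommonTree;

    // Standalone rendition of getFullyQualifiedName() from the last hunk:
    // a leaf returns its own text; an inner DOT node joins its two children.
    public final class DotTreeFlattener {
      public static String flatten(CommonTree ast) {
        if (ast.getChildCount() == 0) {
          return ast.getText(); // identifier, or $elem$/$key$/$value$
        }
        return flatten((CommonTree) ast.getChild(0)) + "."
            + flatten((CommonTree) ast.getChild(1));
      }
    }
    // flatten(^(DOT ^(DOT src_thrift lint) $elem$)) yields "src_thrift.lint.$elem$".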
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 713815)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -113,6 +113,7 @@
 TOK_TABLESERIALIZER;
 TOK_TABLEPROPERTIES;
 TOK_TABLEPROPLIST;
+TOK_TABTYPE;
 TOK_LIMIT;
 TOK_TABLEPROPERTY;
 }
@@ -207,8 +208,16 @@
     -> ^(TOK_ALTERTABLE_SERDEPROPERTIES $name tableProperties)
     ;
 
+tabTypeExpr
+    : Identifier (DOT^ (Identifier | KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE))*
+    ;
+
+partTypeExpr
+    : tabTypeExpr partitionSpec? -> ^(TOK_TABTYPE tabTypeExpr partitionSpec?)
+    ;
+
 descStatement
-    : KW_DESCRIBE (isExtended=KW_EXTENDED)? (tab=tabName) -> ^(TOK_DESCTABLE $tab $isExtended?)
+    : KW_DESCRIBE (isExtended=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $isExtended?)
     ;
 
 showStatement
@@ -853,6 +862,7 @@
 KW_COLLECTION: 'COLLECTION';
 KW_ITEMS: 'ITEMS';
 KW_KEYS: 'KEYS';
+KW_KEY_TYPE: '$KEY$';
 KW_LINES: 'LINES';
 KW_STORED: 'STORED';
 KW_SEQUENCEFILE: 'SEQUENCEFILE';
@@ -878,6 +888,8 @@
 KW_LIMIT: 'LIMIT';
 KW_SET: 'SET';
 KW_PROPERTIES: 'TBLPROPERTIES';
+KW_VALUE_TYPE: '$VALUE$';
+KW_ELEM_TYPE: '$ELEM$';
 
 
 // Operators
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 713815)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -402,15 +402,34 @@
     }
     return colList;
   }
-
+
+  /**
+   * Get the fully qualified name in the ast. e.g. the ast of the form ^(DOT ^(DOT a b) c)
+   * will generate a name of the form a.b.c
+   *
+   * @param ast The AST from which the qualified name has to be extracted
+   * @return String
+   */
+  private String getFullyQualifiedName(CommonTree ast) {
+    if (ast.getChildCount() == 0) {
+      return ast.getText();
+    }
+
+    return getFullyQualifiedName((CommonTree)ast.getChild(0)) + "." +
+           getFullyQualifiedName((CommonTree)ast.getChild(1));
+  }
+
   private void analyzeDescribeTable(CommonTree ast) throws SemanticException {
-    Tree table_t = ast.getChild(0);
-    String tableName = table_t.getChild(0).getText();
+    CommonTree tableTypeExpr = (CommonTree)ast.getChild(0);
+    // Walk the tree and generate a list of components
+    ArrayList<String> comp_list = new ArrayList<String>();
+    String tableName = getFullyQualifiedName((CommonTree)tableTypeExpr.getChild(0));
+
     HashMap<String, String> partSpec = null;
     // get partition metadata if partition specified
-    if (table_t.getChildCount() == 2) {
-      CommonTree partspec = (CommonTree) table_t.getChild(1);
+    if (tableTypeExpr.getChildCount() == 2) {
+      CommonTree partspec = (CommonTree) tableTypeExpr.getChild(1);
       partSpec = new LinkedHashMap<String, String>();
       for (int i = 0; i < partspec.getChildCount(); ++i) {
         CommonTree partspec_val = (CommonTree) partspec.getChild(i);