diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 1ca8d31..d9d2c9e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -2533,9 +2533,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    * Throws this exception if an unexpected error occurs.
    */
   private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
-    String colPath = descTbl.getTableName();
-    String tableName = colPath.substring(0,
-        colPath.indexOf('.') == -1 ? colPath.length() : colPath.indexOf('.'));
+    String colPath = descTbl.getColumnPath();
+    String tableName = descTbl.getTableName();
 
     // describe the table - populate the output stream
     Table tbl = db.getTable(tableName, false);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index f7257cd..da2a302 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1358,20 +1358,234 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 
   /**
-   * Get the fully qualified name in the ast. e.g. the ast of the form ^(DOT
-   * ^(DOT a b) c) will generate a name of the form a.b.c
-   *
-   * @param ast
-   *          The AST from which the qualified name has to be extracted
-   * @return String
+   * Utility class to resolve QualifiedName
    */
-  private String getFullyQualifiedName(ASTNode ast) {
-    if (ast.getChildCount() == 0) {
-      return ast.getText();
+  static class QualifiedNameUtil {
+
+    // delimiter to check DOT delimited qualified names
+    static String delimiter = "\\.";
+
+    /**
+     * Get the fully qualified name in the ast. e.g. the ast of the form ^(DOT
+     * ^(DOT a b) c) will generate a name of the form a.b.c
+     *
+     * @param ast
+     *          The AST from which the qualified name has to be extracted
+     * @return String
+     */
+    static public String getFullyQualifiedName(ASTNode ast) {
+      if (ast.getChildCount() == 0) {
+        return ast.getText();
+      } else if (ast.getChildCount() == 2) {
+        return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
+            + getFullyQualifiedName((ASTNode) ast.getChild(1));
+      } else if (ast.getChildCount() == 3) {
+        return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
+            + getFullyQualifiedName((ASTNode) ast.getChild(1)) + "."
+            + getFullyQualifiedName((ASTNode) ast.getChild(2));
+      } else {
+        return null;
+      }
+    }
+
+    // assume the first component of DOT delimited name is tableName
+    // get the attemptTableName
+    static public String getAttemptTableName(Hive db, String qualifiedName, boolean isColumn) {
+      // check whether the name starts with table
+      // DESCRIBE table
+      // DESCRIBE table.column
+      // DESCRIBE table column
+      String tableName = qualifiedName.substring(0,
+          qualifiedName.indexOf('.') == -1 ?
+          qualifiedName.length() : qualifiedName.indexOf('.'));
+      try {
+        Table tab = db.getTable(tableName);
+        if (tab != null) {
+          if (isColumn) {
+            // if attempt to get columnPath
+            // return the whole qualifiedName(table.column or table)
+            return qualifiedName;
+          } else {
+            // if attempt to get tableName
+            // return table
+            return tableName;
+          }
+        }
+      } catch (HiveException e) {
+        // assume the first DOT delimited component is tableName
+        // OK if it is not
+        // do nothing when having exception
+        return null;
+      }
+      return null;
+    }
+
+    // get Database Name
+    static public String getDBName(Hive db, ASTNode ast) {
+      String dbName = null;
+      String fullyQualifiedName = getFullyQualifiedName(ast);
+
+      // if database.table or database.table.column or table.column
+      // first try the first component of the DOT separated name
+      if (ast.getChildCount() >= 2) {
+        dbName = fullyQualifiedName.substring(0,
+            fullyQualifiedName.indexOf('.') == -1 ?
+            fullyQualifiedName.length() :
+            fullyQualifiedName.indexOf('.'));
+        try {
+          // if the database name is not valid
+          // it is table.column
+          // return null as dbName
+          if (!db.databaseExists(dbName)) {
+            return null;
+          }
+        } catch (HiveException e) {
+          return null;
+        }
+      } else {
+        // in other cases, return null
+        // database is not validated if null
+        return null;
+      }
+      return dbName;
+    }
+
+    // get Table Name
+    static public String getTableName(Hive db, ASTNode ast)
+        throws SemanticException {
+      String tableName = null;
+      String fullyQualifiedName = getFullyQualifiedName(ast);
+
+      // assume the first component of DOT delimited name is tableName
+      String attemptTableName = getAttemptTableName(db, fullyQualifiedName, false);
+      if (attemptTableName != null) {
+        return attemptTableName;
+      }
+
+      // if the name does not start with table
+      // it should start with database
+      // DESCRIBE database.table
+      // DESCRIBE database.table column
+      if (fullyQualifiedName.split(delimiter).length == 3) {
+        // if DESCRIBE database.table.column
+        // invalid syntax exception
+        if (ast.getChildCount() == 2) {
+          throw new SemanticException(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(fullyQualifiedName));
+        } else {
+          // if DESCRIBE database.table column
+          // return database.table as tableName
+          tableName = fullyQualifiedName.substring(0,
+              fullyQualifiedName.lastIndexOf('.'));
+        }
+      } else if (fullyQualifiedName.split(delimiter).length == 2) {
+        // if DESCRIBE database.table
+        // return database.table as tableName
+        tableName = fullyQualifiedName;
+      } else {
+        // if fullyQualifiedName only has one component
+        // it is an invalid table
+        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(fullyQualifiedName));
+      }
+
+      return tableName;
+    }
+
+    // get column path
+    static public String getColPath(
+        Hive db,
+        ASTNode parentAst,
+        ASTNode ast,
+        String tableName,
+        Map<String, String> partSpec) {
+
+      // if parent has two children
+      // it could be DESCRIBE table key
+      // or DESCRIBE table partition
+      if (parentAst.getChildCount() == 2 && partSpec == null) {
+        // if partitionSpec is null
+        // it is DESCRIBE table key
+        // return table.column as columnPath
+        return getFullyQualifiedName(parentAst);
+      }
+
+      // assume the first component of DOT delimited name is tableName
+      String attemptTableName = getAttemptTableName(db, tableName, true);
+      if (attemptTableName != null) {
+        return attemptTableName;
+      }
+
+      // if the name does not start with table
+      // it should start with database
+      // DESCRIBE database.table
+      // DESCRIBE database.table column
+      if (tableName.split(delimiter).length == 3) {
+        // if DESCRIBE database.table column
+        // return table.column as column path
+        return tableName.substring(
+            tableName.indexOf(".") + 1, tableName.length());
+      }
+
+      // in other cases, column path is the same as tableName
+      return tableName;
+    }
+
+    // get partition metadata
+    static public Map<String, String> getPartitionSpec(Hive db, ASTNode ast, String tableName)
+        throws SemanticException {
+      // if ast has two children
+      // it could be DESCRIBE table key
+      // or DESCRIBE table partition
+      // check whether it is DESCRIBE table partition
+      if (ast.getChildCount() == 2) {
+        ASTNode partNode = (ASTNode) ast.getChild(1);
+        HashMap<String, String> partSpec = null;
+        try {
+          partSpec = getPartSpec(partNode);
+        } catch (SemanticException e) {
+          // get exception in resolving partition
+          // it could be DESCRIBE table key
+          // return null
+          // continue processing for DESCRIBE table key
+          return null;
+        }
+
+        Table tab = null;
+        try {
+          tab = db.getTable(tableName);
+        } catch (HiveException e) {
+          // if table not valid
+          // throw semantic exception
+          throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
+        }
+
+        if (partSpec != null) {
+          Partition part = null;
+          try {
+            part = db.getPartition(tab, partSpec, false);
+          } catch (HiveException e) {
+            // if get exception in finding partition
+            // it could be DESCRIBE table key
+            // return null
+            // continue processing for DESCRIBE table key
+            return null;
+          }
+
+          // if partition is not found
+          // it is DESCRIBE table partition
+          // invalid partition exception
+          if (part == null) {
+            throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
+          }
+
+          // it is DESCRIBE table partition
+          // return partition metadata
+          return partSpec;
+        }
+      }
+
+      return null;
     }
-    return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
-        + getFullyQualifiedName((ASTNode) ast.getChild(1));
   }
 
   /**
@@ -1433,22 +1647,32 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
     ASTNode tableTypeExpr = (ASTNode) ast.getChild(0);
-    String tableName = getFullyQualifiedName((ASTNode) tableTypeExpr
-        .getChild(0));
-    HashMap<String, String> partSpec = null;
-    // get partition metadata if partition specified
-    if (tableTypeExpr.getChildCount() == 2) {
-      ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1);
-      partSpec = getPartSpec(partspec);
+    String qualifiedName =
+        QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
+    String tableName =
+        QualifiedNameUtil.getTableName(db, (ASTNode)(tableTypeExpr.getChild(0)));
+    String dbName =
+        QualifiedNameUtil.getDBName(db, (ASTNode)(tableTypeExpr.getChild(0)));
+
+    Map<String, String> partSpec =
+        QualifiedNameUtil.getPartitionSpec(db, tableTypeExpr, tableName);
+
+    String colPath = QualifiedNameUtil.getColPath(
+        db, tableTypeExpr, (ASTNode) tableTypeExpr.getChild(0), qualifiedName, partSpec);
+
+    // if the database is not the one currently in use
+    // validate the database
+    if (dbName != null) {
+      validateDatabase(dbName);
+    }
+    if (partSpec != null) {
+      validateTable(tableName, partSpec);
     }
 
-    // Handle xpath correctly
-    String actualTableName = tableName.substring(0,
-        tableName.indexOf('.') == -1 ? tableName.length() : tableName.indexOf('.'));
-    validateTable(actualTableName, partSpec);
+    DescTableDesc descTblDesc = new DescTableDesc(
+        ctx.getResFile(), tableName, partSpec, colPath);
 
-    DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec);
     if (ast.getChildCount() == 2) {
       int descOptions = ast.getChild(1).getType();
       descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED);
@@ -1708,7 +1932,8 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       ASTNode child = (ASTNode) ast.getChild(i);
       if (child.getType() == HiveParser.TOK_TABTYPE) {
         ASTNode tableTypeExpr = (ASTNode) child;
-        tableName = getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
+        tableName =
+            QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
         // get partition metadata if partition specified
         if (tableTypeExpr.getChildCount() == 2) {
           ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index 356779a..2d6a1ab 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -826,16 +826,29 @@ tabTypeExpr
    : Identifier (DOT^ (Identifier | KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE))*
    ;
 
+descTabTypeExpr
+@init { msgs.push("specifying describe table types"); }
+@after { msgs.pop(); }
+
+   : Identifier (DOT^ (Identifier | KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE))* Identifier?
+   ;
+
 partTypeExpr
 @init { msgs.push("specifying table partitions"); }
 @after { msgs.pop(); }
    : tabTypeExpr partitionSpec? -> ^(TOK_TABTYPE tabTypeExpr partitionSpec?)
    ;
 
+descPartTypeExpr
+@init { msgs.push("specifying describe table partitions"); }
+@after { msgs.pop(); }
+   : descTabTypeExpr partitionSpec? -> ^(TOK_TABTYPE descTabTypeExpr partitionSpec?)
+   ;
+
 descStatement
 @init { msgs.push("describe statement"); }
 @after { msgs.pop(); }
-    : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
+    : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=descPartTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
     | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
     | (KW_DESCRIBE|KW_DESC) KW_DATABASE KW_EXTENDED? (dbName=Identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
    ;
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
index ae6487d..fe5b497 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 
@@ -29,15 +29,17 @@ import org.apache.hadoop.fs.Path;
  */
 @Explain(displayName = "Describe Table")
 public class DescTableDesc extends DDLDesc implements Serializable {
-  public void setPartSpec(HashMap<String, String> partSpec) {
+  public void setPartSpec(Map<String, String> partSpec) {
     this.partSpec = partSpec;
   }
 
   private static final long serialVersionUID = 1L;
   String tableName;
-  HashMap<String, String> partSpec;
+  Map<String, String> partSpec;
   String resFile;
+
+  String colPath;
   boolean isExt;
   boolean isFormatted;
 
   /**
@@ -58,12 +60,13 @@ public class DescTableDesc extends DDLDesc implements Serializable {
    * @param tableName
    */
   public DescTableDesc(Path resFile, String tableName,
-      HashMap<String, String> partSpec) {
+      Map<String, String> partSpec, String colPath) {
     this.isExt = false;
     this.isFormatted = false;
     this.partSpec = partSpec;
     this.resFile = resFile.toString();
     this.tableName = tableName;
+    this.colPath = colPath;
   }
 
   public String getTable() {
@@ -121,10 +124,25 @@ public class DescTableDesc extends DDLDesc implements Serializable {
   }
 
   /**
+   * @param colPath
+   *          the colPath to set
+   */
+  public void setColPath(String colPath) {
+    this.colPath = colPath;
+  }
+
+  /**
+   * @return the columnPath
+   */
+  public String getColumnPath() {
+    return colPath;
+  }
+
+  /**
    * @return the partSpec
    */
   @Explain(displayName = "partition")
-  public HashMap<String, String> getPartSpec() {
+  public Map<String, String> getPartSpec() {
     return partSpec;
   }
 
@@ -132,7 +150,7 @@ public class DescTableDesc extends DDLDesc implements Serializable {
    * @param partSpec
    *          the partSpec to set
    */
-  public void setPartSpecs(HashMap<String, String> partSpec) {
+  public void setPartSpecs(Map<String, String> partSpec) {
     this.partSpec = partSpec;
   }
 
diff --git ql/src/test/queries/clientnegative/desc_failure3.q ql/src/test/queries/clientnegative/desc_failure3.q
new file mode 100644
index 0000000..bee0ea5
--- /dev/null
+++ ql/src/test/queries/clientnegative/desc_failure3.q
@@ -0,0 +1,5 @@
+CREATE DATABASE db1;
+CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING);
+
+-- describe database.table.column
+DESCRIBE db1.t1.key1;
diff --git ql/src/test/queries/clientpositive/describe_syntax.q ql/src/test/queries/clientpositive/describe_syntax.q
new file mode 100644
index 0000000..c9255b5
--- /dev/null
+++ ql/src/test/queries/clientpositive/describe_syntax.q
@@ -0,0 +1,46 @@
+
+CREATE DATABASE db1;
+CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING);
+
+use db1;
+
+ALTER TABLE t1 ADD PARTITION (ds='3', part='3');
+ALTER TABLE t1 ADD PARTITION (ds='4', part='4');
+ALTER TABLE t1 ADD PARTITION (ds='4', part='5');
+
+-- describe table
+DESCRIBE t1;
+DESCRIBE EXTENDED t1;
+DESCRIBE FORMATTED t1;
+
+-- describe database.table
+DESCRIBE db1.t1;
+DESCRIBE EXTENDED db1.t1;
+DESCRIBE FORMATTED db1.t1;
+
+-- describe table column
+DESCRIBE t1 key1;
+DESCRIBE EXTENDED t1 key1;
+DESCRIBE FORMATTED t1 key1;
+
+-- describe database.table column
+DESCRIBE db1.t1 key1;
+DESCRIBE EXTENDED db1.t1 key1;
+DESCRIBE FORMATTED db1.t1 key1;
+
+-- describe table.column
+-- after first checking t1.key1 for database.table not valid
+-- fall back to the old syntax table.column
+DESCRIBE t1.key1;
+DESCRIBE EXTENDED t1.key1;
+DESCRIBE FORMATTED t1.key1;
+
+-- describe table partition
+DESCRIBE t1 PARTITION(ds='4', part='5');
+DESCRIBE EXTENDED t1 PARTITION(ds='4', part='5');
+DESCRIBE FORMATTED t1 PARTITION(ds='4', part='5');
+
+-- describe database.table partition
+DESCRIBE db1.t1 PARTITION(ds='4', part='5');
+DESCRIBE EXTENDED db1.t1 PARTITION(ds='4', part='5');
+DESCRIBE FORMATTED db1.t1 PARTITION(ds='4', part='5');
diff --git ql/src/test/results/clientnegative/desc_failure3.q.out ql/src/test/results/clientnegative/desc_failure3.q.out
new file mode 100644
index 0000000..1f709c9
--- /dev/null
+++ ql/src/test/results/clientnegative/desc_failure3.q.out
@@ -0,0 +1,10 @@
+PREHOOK: query: CREATE DATABASE db1
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: CREATE DATABASE db1
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: db1@t1
+FAILED: SemanticException [Error 10004]: Invalid table alias or column reference db1.t1.key1
diff --git ql/src/test/results/clientpositive/describe_syntax.q.out ql/src/test/results/clientpositive/describe_syntax.q.out
new file mode 100644
index 0000000..40f22b1
--- /dev/null
+++ ql/src/test/results/clientpositive/describe_syntax.q.out
@@ -0,0 +1,315 @@
+PREHOOK: query: CREATE DATABASE db1
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: CREATE DATABASE db1
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds STRING, part STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: db1@t1
+PREHOOK: query: use db1
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use db1
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='3', part='3')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: db1@t1
+POSTHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='3', part='3')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: db1@t1
+POSTHOOK: Output: db1@t1@ds=3/part=3
+PREHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='4', part='4')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: db1@t1
+POSTHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='4', part='4')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: db1@t1
+POSTHOOK: Output: db1@t1@ds=4/part=4
+PREHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='4', part='5')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: db1@t1
+POSTHOOK: query: ALTER TABLE t1 ADD PARTITION (ds='4', part='5')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: db1@t1
+POSTHOOK: Output: db1@t1@ds=4/part=5
+PREHOOK: query: -- describe table
+DESCRIBE t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe table
+DESCRIBE t1
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+PREHOOK: query: DESCRIBE EXTENDED t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED t1
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+
+#### A masked pattern was here ####
+PREHOOK: query: DESCRIBE FORMATTED t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	None
+value1              	string              	None
+
+# Partition Information
+# col_name            	data_type           	comment
+
+ds                  	string              	None
+part                	string              	None
+
+# Detailed Table Information
+Database:           	db1
+#### A masked pattern was here ####
+Protect Mode:       	None
+Retention:          	0
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: -- describe database.table
+DESCRIBE db1.t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe database.table
+DESCRIBE db1.t1
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+PREHOOK: query: DESCRIBE EXTENDED db1.t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED db1.t1
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+
+#### A masked pattern was here ####
+PREHOOK: query: DESCRIBE FORMATTED db1.t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED db1.t1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	None
+value1              	string              	None
+
+# Partition Information
+# col_name            	data_type           	comment
+
+ds                  	string              	None
+part                	string              	None
+
+# Detailed Table Information
+Database:           	db1
+#### A masked pattern was here ####
+Protect Mode:       	None
+Retention:          	0
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: -- describe table column
+DESCRIBE t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe table column
+DESCRIBE t1 key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE EXTENDED t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED t1 key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE FORMATTED t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t1 key1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	from deserializer
+PREHOOK: query: -- describe database.table column
+DESCRIBE db1.t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe database.table column
+DESCRIBE db1.t1 key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE EXTENDED db1.t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED db1.t1 key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE FORMATTED db1.t1 key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED db1.t1 key1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	from deserializer
+PREHOOK: query: -- describe table.column
+-- after first checking t1.key1 for database.table not valid
+-- fall back to the old syntax table.column
+DESCRIBE t1.key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe table.column
+-- after first checking t1.key1 for database.table not valid
+-- fall back to the old syntax table.column
+DESCRIBE t1.key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE EXTENDED t1.key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED t1.key1
+POSTHOOK: type: DESCTABLE
+key1	int	from deserializer
+PREHOOK: query: DESCRIBE FORMATTED t1.key1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t1.key1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	from deserializer
+PREHOOK: query: -- describe table partition
+DESCRIBE t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe table partition
+DESCRIBE t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+PREHOOK: query: DESCRIBE EXTENDED t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+
+#### A masked pattern was here ####
+PREHOOK: query: DESCRIBE FORMATTED t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	None
+value1              	string              	None
+
+# Partition Information
+# col_name            	data_type           	comment
+
+ds                  	string              	None
+part                	string              	None
+
+# Detailed Partition Information
+Partition Value:    	[4, 5]
+Database:           	db1
+Table:              	t1
+#### A masked pattern was here ####
+Protect Mode:       	None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: -- describe database.table partition
+DESCRIBE db1.t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- describe database.table partition
+DESCRIBE db1.t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+PREHOOK: query: DESCRIBE EXTENDED db1.t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED db1.t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+key1	int
+value1	string
+ds	string
+part	string
+
+#### A masked pattern was here ####
+PREHOOK: query: DESCRIBE FORMATTED db1.t1 PARTITION(ds='4', part='5')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED db1.t1 PARTITION(ds='4', part='5')
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment
+
+key1                	int                 	None
+value1              	string              	None
+
+# Partition Information
+# col_name            	data_type           	comment
+
+ds                  	string              	None
+part                	string              	None
+
+# Detailed Partition Information
+Partition Value:    	[4, 5]
+Database:           	db1
+Table:              	t1
+#### A masked pattern was here ####
+Protect Mode:       	None
+#### A masked pattern was here ####
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
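Editor's note (not part of the patch): the standalone sketch below illustrates the resolution precedence that QualifiedNameUtil implements for a DOT-delimited DESCRIBE argument — try the leading component as a table first, so the pre-existing table.column syntax keeps working, and only then fall back to treating it as a database. The Catalog interface and resolveDescribeTarget method are hypothetical stand-ins for the metastore lookups the patch uses (db.getTable / db.databaseExists); they are not Hive APIs.

import java.util.Arrays;

public class DescribeResolutionSketch {

  // Hypothetical stand-in for the metastore checks used by the patch.
  interface Catalog {
    boolean tableExists(String tableName);
    boolean databaseExists(String dbName);
  }

  /**
   * Returns {tableName, colPath} for a DESCRIBE argument such as
   * "t1", "t1.key1", or "db1.t1", mirroring the precedence above.
   */
  static String[] resolveDescribeTarget(Catalog catalog, String qualifiedName) {
    String[] parts = qualifiedName.split("\\.");
    // Backward compatibility wins: if the first component names a table,
    // keep the old interpretation (table, or table.column).
    if (catalog.tableExists(parts[0])) {
      return new String[] { parts[0], qualifiedName };
    }
    // Otherwise the name must start with a database.
    if (parts.length == 2 && catalog.databaseExists(parts[0])) {
      return new String[] { qualifiedName, qualifiedName };  // database.table
    }
    if (parts.length == 3 && catalog.databaseExists(parts[0])) {
      // database.table.column is rejected (see desc_failure3.q);
      // DESCRIBE database.table column is the supported spelling.
      throw new IllegalArgumentException(
          "Invalid table alias or column reference " + qualifiedName);
    }
    throw new IllegalArgumentException("Invalid table " + qualifiedName);
  }

  public static void main(String[] args) {
    Catalog c = new Catalog() {
      public boolean tableExists(String t) { return t.equals("t1"); }
      public boolean databaseExists(String d) { return d.equals("db1"); }
    };
    // Old syntax still resolves to table t1 with column path t1.key1.
    System.out.println(Arrays.toString(resolveDescribeTarget(c, "t1.key1")));
    // New syntax resolves to table db1.t1.
    System.out.println(Arrays.toString(resolveDescribeTarget(c, "db1.t1")));
  }
}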