Index: build.xml
===================================================================
--- build.xml (revision 1360519)
+++ build.xml (working copy)
@@ -508,7 +508,6 @@
-
Index: eclipse-templates/.classpath
===================================================================
--- eclipse-templates/.classpath (revision 1360519)
+++ eclipse-templates/.classpath (working copy)
@@ -84,6 +84,7 @@
+
Index: serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java
===================================================================
--- serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java (revision 1360519)
+++ serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java (working copy)
@@ -89,6 +89,20 @@
   public static final String LIST_COLUMN_TYPES = "columns.types";
 
+  public static final String EXTERNAL = "external";
+
+  public static final String TBL_COMMENT = "tbl_comment";
+
+  public static final String LIST_PARTITIONS = "partitions";
+
+  public static final String SORT_BUCKET = "sort_bucket";
+
+  public static final String ROW_FORMAT = "row_format";
+
+  public static final String TBL_LOCATION = "tbl_location";
+
+  public static final String TBL_PROPERTIES = "tbl_properties";
+
   public static final Set<String> PrimitiveTypes = new HashSet<String>();
 
   static {
     PrimitiveTypes.add("void");
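The constants added above are consumed as StringTemplate attribute names when DDLTask (further down in this patch) assembles the CREATE TABLE text. A minimal sketch of that pattern, using the ANTLR StringTemplate 3.x API already bundled with Hive; the class name, table name, and attribute values here are illustrative only:

```java
import org.antlr.stringtemplate.StringTemplate;
import org.apache.hadoop.hive.serde.Constants;

/** Illustrative only: shows how the new constant names act as template attributes. */
public class ShowCreateTemplateSketch {
  public static void main(String[] args) {
    StringTemplate st = new StringTemplate(
        "CREATE $" + Constants.EXTERNAL + "$ TABLE demo_tbl(\n"
        + "$" + Constants.LIST_COLUMNS + "$)\n"
        + "$" + Constants.TBL_COMMENT + "$");
    // Attributes left unset render as empty text, which is how a managed table
    // ends up without the EXTERNAL keyword.
    st.setAttribute(Constants.LIST_COLUMNS, "  key int, \n  value string");
    st.setAttribute(Constants.TBL_COMMENT, "COMMENT 'illustrative table'");
    System.out.println(st.toString());
  }
}
```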
Index: ql/src/test/results/clientnegative/show_create_table_index.q.out
===================================================================
--- ql/src/test/results/clientnegative/show_create_table_index.q.out (revision 0)
+++ ql/src/test/results/clientnegative/show_create_table_index.q.out (working copy)
@@ -0,0 +1,11 @@
+PREHOOK: query: CREATE TABLE tmp_showcrt (key int, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_showcrt (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt
+PREHOOK: query: CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD
+PREHOOK: type: CREATEINDEX
+POSTHOOK: query: CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD
+POSTHOOK: type: CREATEINDEX
+POSTHOOK: Output: default@default__tmp_showcrt_tmp_index__
+FAILED: SemanticException [Error 10130]: SHOW CREATE TABLE does not support index. default__tmp_showcrt_tmp_index__ is INDEX_TABLE
Index: ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out
===================================================================
--- ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out (revision 0)
+++ ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out (working copy)
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10001]: Table not found tmp_nonexist
Index: ql/src/test/results/clientpositive/show_create_table.q.out
===================================================================
--- ql/src/test/results/clientpositive/show_create_table.q.out (revision 0)
+++ ql/src/test/results/clientpositive/show_create_table.q.out (working copy)
@@ -0,0 +1,379 @@
+PREHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE TABLE tmp_showcrt1(
+  key int, 
+  value string, 
+  newvalue bigint)
+ROW FORMAT DELIMITED 
+  FIELDS TERMINATED BY ',' 
+  COLLECTION ITEMS TERMINATED BY '|' 
+  MAP KEYS TERMINATED BY '%' 
+  LINES TERMINATED BY '\n' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'temporary table'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'EXTERNAL'='FALSE', 
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED BY 
+  'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' 
+WITH SERDEPROPERTIES ( 
+  'serialization.format'='1')
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE TABLE tmp_showcrt1(
+  key int, 
+  value string, 
+  newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.RCFileInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value')
+CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$')
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value')
+CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key string, 
+  newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY ( 
+  value bigint COMMENT 'some value')
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  key ASC, 
+  newvalue DESC) 
+INTO 10 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' 
+STORED BY 
+  'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' 
+WITH SERDEPROPERTIES ( 
+  'serialization.format'='$', 
+  'field.delim'=',')
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: CREATE VIEW tmp_copy_src AS SELECT * FROM src
+PREHOOK: type: CREATEVIEW
+#### A masked pattern was here ####
+POSTHOOK: query: CREATE VIEW tmp_copy_src AS SELECT * FROM src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: default@tmp_copy_src
+#### A masked pattern was here ####
+PREHOOK: query: SHOW CREATE TABLE tmp_copy_src
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_copy_src
+POSTHOOK: query: SHOW CREATE TABLE tmp_copy_src
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_copy_src
+SELECT * FROM src
+PREHOOK: query: DROP VIEW tmp_copy_src
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@tmp_copy_src
+PREHOOK: Output: default@tmp_copy_src
+POSTHOOK: query: DROP VIEW tmp_copy_src
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@tmp_copy_src
+POSTHOOK: Output: default@tmp_copy_src
+PREHOOK: query: CREATE DATABASE tmp_feng comment 'for show create table test'
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: CREATE DATABASE tmp_feng comment 'for show create table test'
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+tmp_feng
+PREHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: tmp_feng@tmp_showcrt
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: SHOW CREATE TABLE tmp_feng.tmp_showcrt
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: tmp_feng@tmp_showcrt
+POSTHOOK: query: SHOW CREATE TABLE tmp_feng.tmp_showcrt
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: tmp_feng@tmp_showcrt
+CREATE TABLE tmp_feng.tmp_showcrt(
+  key string, 
+  value int)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_feng.tmp_showcrt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE tmp_feng.tmp_showcrt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: tmp_feng@tmp_showcrt
+PREHOOK: query: DROP DATABASE tmp_feng
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE tmp_feng
+POSTHOOK: type: DROPDATABASE
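The expected output above is what a client reads back row by row; the result schema is declared later in this patch (in ShowCreateTableDesc) as a single `createtab_stmt` string column. A hedged sketch of consuming it over the pre-HiveServer2 JDBC driver — the driver class, URL, and credentials are assumptions about the deployment, not part of this patch:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/** Reads the SHOW CREATE TABLE result set; connection details are assumptions. */
public class ShowCreateTableJdbcSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    ResultSet rs = stmt.executeQuery("SHOW CREATE TABLE tmp_showcrt1");
    StringBuilder ddl = new StringBuilder();
    while (rs.next()) {
      ddl.append(rs.getString(1)).append('\n'); // single createtab_stmt column, one line per row
    }
    rs.close();
    con.close();
    System.out.println(ddl);
  }
}
```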
Index: ql/src/test/queries/clientnegative/show_create_table_index.q
===================================================================
--- ql/src/test/queries/clientnegative/show_create_table_index.q (revision 0)
+++ ql/src/test/queries/clientnegative/show_create_table_index.q (working copy)
@@ -0,0 +1,6 @@
+CREATE TABLE tmp_showcrt (key int, value string);
+CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD;
+SHOW CREATE TABLE default__tmp_showcrt_tmp_index__;
+DROP INDEX tmp_index on tmp_showcrt;
+DROP TABLE tmp_showcrt;
+
Index: ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q
===================================================================
--- ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q (revision 0)
+++ ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q (working copy)
@@ -0,0 +1,2 @@
+SHOW CREATE TABLE tmp_nonexist;
+
Index: ql/src/test/queries/clientpositive/show_create_table.q
===================================================================
--- ql/src/test/queries/clientpositive/show_create_table.q (revision 0)
+++ ql/src/test/queries/clientpositive/show_create_table.q (working copy)
@@ -0,0 +1,53 @@
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+LOCATION 'file:${system:test.tmp.dir}/tmp_showcrt1';
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS;
+SHOW CREATE TABLE tmp_showcrt1;
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE');
+SHOW CREATE TABLE tmp_showcrt1;
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE');
+SHOW CREATE TABLE tmp_showcrt1;
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE');
+SHOW CREATE TABLE tmp_showcrt1;
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler');
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+
+
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value')
+CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$');
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+
+CREATE VIEW tmp_copy_src AS SELECT * FROM src;
+SHOW CREATE TABLE tmp_copy_src;
+DROP VIEW tmp_copy_src;
+
+CREATE DATABASE tmp_feng comment 'for show create table test';
+SHOW DATABASES;
+CREATE TABLE tmp_feng.tmp_showcrt (key string, value int);
+USE default;
+SHOW CREATE TABLE tmp_feng.tmp_showcrt;
+DROP TABLE tmp_feng.tmp_showcrt;
+DROP DATABASE tmp_feng;
+
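The positive test implicitly relies on the statement being round-trippable: the printed DDL should recreate an equivalent table. A rough sketch of that round trip driven programmatically; the table name is hypothetical, and the exact Driver/SessionState signatures (getResults taking ArrayList&lt;String&gt;, SessionState.start) vary across Hive versions:

```java
import java.util.ArrayList;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

/** Hypothetical round-trip check; table name and flow are illustrative. */
public class ShowCreateTableRoundTrip {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(SessionState.class);
    SessionState.start(conf);
    Driver driver = new Driver(conf);

    driver.run("CREATE TABLE tmp_showcrt_rt (key int, value string)");
    driver.run("SHOW CREATE TABLE tmp_showcrt_rt");

    // Each fetched row is one line of the generated DDL.
    ArrayList<String> lines = new ArrayList<String>();
    driver.getResults(lines);
    String ddl = StringUtils.join(lines, "\n");

    driver.run("DROP TABLE tmp_showcrt_rt");
    driver.run(ddl); // the emitted statement should recreate an equivalent table
  }
}
```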
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -44,6 +44,9 @@
 import java.util.SortedSet;
 import java.util.TreeSet;
 
+import org.antlr.stringtemplate.StringTemplate;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -72,6 +75,8 @@
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -99,6 +104,7 @@
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
 import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataFormatter;
 import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
@@ -128,6 +134,7 @@
 import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
 import org.apache.hadoop.hive.ql.plan.RevokeDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
+import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
@@ -359,6 +366,11 @@
         return showPartitions(db, showParts);
       }
 
+      ShowCreateTableDesc showCreateTbl = work.getShowCreateTblDesc();
+      if (showCreateTbl != null) {
+        return showCreateTable(db, showCreateTbl);
+      }
+
       RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc();
       if (roleDDLDesc != null) {
         return roleDDL(roleDDLDesc);
@@ -1876,6 +1888,219 @@
   }
 
   /**
+   * Write a statement of how to create a table to a file.
+   *
+   * @param db
+   *          The database in question.
+   * @param showCreateTbl
+   *          This is the table we're interested in.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws HiveException {
+    // get the create table statement for the table and populate the output
+    String tableName = showCreateTbl.getTableName();
+    Table tbl = db.getTable(tableName, false);
+    DataOutput outStream = null;
+    List<String> duplicateProps = new ArrayList<String>();
+    try {
+      Path resFile = new Path(showCreateTbl.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      outStream = fs.create(resFile);
+
+      if (tbl.isView()) {
+        String createTab_stmt = tbl.getViewOriginalText();
+        outStream.writeBytes(createTab_stmt.toString());
+        ((FSDataOutputStream) outStream).close();
+        outStream = null;
+        return 0;
+      }
+
+      StringTemplate createTab_stmt = new StringTemplate("CREATE $" + Constants.EXTERNAL + "$ TABLE " +
+          tableName + "(\n" +
+          "$" + Constants.LIST_COLUMNS + "$)\n" +
+          "$" + Constants.TBL_COMMENT + "$\n" +
+          "$" + Constants.LIST_PARTITIONS + "$\n" +
+          "$" + Constants.SORT_BUCKET + "$\n" +
+          "$" + Constants.ROW_FORMAT + "$\n" +
+          "LOCATION\n" +
+          "$" + Constants.TBL_LOCATION + "$\n" +
+          "TBLPROPERTIES (\n" +
+          "$" + Constants.TBL_PROPERTIES + "$)\n");
+
+      // For cases where the table is external
+      String tbl_external = "";
+      if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
+        duplicateProps.add("EXTERNAL");
+        tbl_external = "EXTERNAL";
+      }
+
+      // Columns
+      String tbl_columns = "";
+      List<FieldSchema> cols = tbl.getCols();
+      List<String> columns = new ArrayList<String>();
+      for (FieldSchema col : cols) {
+        String columnDesc = "  " + col.getName() + " " + col.getType();
+        if (col.getComment() != null) {
+          columnDesc = columnDesc + " COMMENT '" + col.getComment() + "'";
+        }
+        columns.add(columnDesc);
+      }
+      tbl_columns = StringUtils.join(columns, ", \n");
+
+      // Table comment
+      String tbl_comment = "";
+      String tabComment = tbl.getProperty("comment");
+      if (tabComment != null) {
+        duplicateProps.add("comment");
+        tbl_comment = "COMMENT '" + tabComment + "'";
+      }
+
+      // Partitions
+      String tbl_partitions = "";
+      List<FieldSchema> partKeys = tbl.getPartitionKeys();
+      if (partKeys.size() > 0) {
+        tbl_partitions += "PARTITIONED BY ( \n";
+        List<String> partCols = new ArrayList<String>();
+        for (FieldSchema partKey : partKeys) {
+          String partColDesc = "  " + partKey.getName() + " " + partKey.getType();
+          if (partKey.getComment() != null) {
+            partColDesc = partColDesc + " COMMENT '" + partKey.getComment() + "'";
+          }
+          partCols.add(partColDesc);
+        }
+        tbl_partitions += StringUtils.join(partCols, ", \n");
+        tbl_partitions += ")";
+      }
+
+      // Clusters (Buckets)
+      String tbl_sort_bucket = "";
+      List<String> buckCols = tbl.getBucketCols();
+      if (buckCols.size() > 0) {
+        duplicateProps.add("SORTBUCKETCOLSPREFIX");
+        tbl_sort_bucket += "CLUSTERED BY ( \n  ";
+        tbl_sort_bucket += StringUtils.join(buckCols, ", \n  ");
+        tbl_sort_bucket += ") \n";
+        List<Order> sortCols = tbl.getSortCols();
+        if (sortCols.size() > 0) {
+          tbl_sort_bucket += "SORTED BY ( \n";
+          // Order
+          List<String> sortKeys = new ArrayList<String>();
+          for (Order sortCol : sortCols) {
+            String sortKeyDesc = "  " + sortCol.getCol() + " ";
+            if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
+              sortKeyDesc = sortKeyDesc + "ASC";
+            }
+            else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
+              sortKeyDesc = sortKeyDesc + "DESC";
+            }
+            sortKeys.add(sortKeyDesc);
+          }
+          tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
+          tbl_sort_bucket += ") \n";
+        }
+        // Use the declared bucket count, not the number of bucketing columns,
+        // so the generated DDL recreates an equivalent table.
+        tbl_sort_bucket += "INTO " + tbl.getNumBuckets() + " BUCKETS";
+      }
+      // Row format (SerDe)
+      String tbl_row_format = "";
+      StorageDescriptor sd = tbl.getTTable().getSd();
+      SerDeInfo serdeInfo = sd.getSerdeInfo();
+      tbl_row_format += "ROW FORMAT";
+      if (tbl.getStorageHandler() == null) {
+        if (serdeInfo.getParametersSize() > 1) {
+          // There is a "serialization.format" property by default,
+          // even with a delimited row format.
+          // But our result will only cover the following four delimiters.
+          tbl_row_format += " DELIMITED \n";
+          Map<String, String> delims = serdeInfo.getParameters();
+          // Warn:
+          // If the four delimiters all exist in a CREATE TABLE query,
+          // this following order needs to be strictly followed,
+          // or the query will fail with a ParseException.
+          if (delims.containsKey(Constants.FIELD_DELIM)) {
+            tbl_row_format += "  FIELDS TERMINATED BY '" +
+                StringEscapeUtils.escapeJava(delims.get(Constants.FIELD_DELIM)) + "' \n";
+          }
+          if (delims.containsKey(Constants.COLLECTION_DELIM)) {
+            tbl_row_format += "  COLLECTION ITEMS TERMINATED BY '" +
+                StringEscapeUtils.escapeJava(delims.get(Constants.COLLECTION_DELIM)) + "' \n";
+          }
+          if (delims.containsKey(Constants.MAPKEY_DELIM)) {
+            tbl_row_format += "  MAP KEYS TERMINATED BY '" +
+                StringEscapeUtils.escapeJava(delims.get(Constants.MAPKEY_DELIM)) + "' \n";
+          }
+          if (delims.containsKey(Constants.LINE_DELIM)) {
+            tbl_row_format += "  LINES TERMINATED BY '" +
+                StringEscapeUtils.escapeJava(delims.get(Constants.LINE_DELIM)) + "' \n";
+          }
+        }
+        else {
+          tbl_row_format += " SERDE \n  '" + serdeInfo.getSerializationLib() + "' \n";
+        }
+        tbl_row_format += "STORED AS INPUTFORMAT \n  '" + sd.getInputFormat() + "' \n";
+        tbl_row_format += "OUTPUTFORMAT \n  '" + sd.getOutputFormat() + "'";
+      }
+      else {
+        duplicateProps.add(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+        tbl_row_format += " SERDE \n  '" + serdeInfo.getSerializationLib() + "' \n";
+        tbl_row_format += "STORED BY \n  '" + tbl.getParameters().get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) + "' \n";
+        // SerDe Properties
+        if (serdeInfo.getParametersSize() > 0) {
+          tbl_row_format += "WITH SERDEPROPERTIES ( \n";
+          List<String> serdeCols = new ArrayList<String>();
+          for (Map.Entry<String, String> entry : serdeInfo.getParameters().entrySet()) {
+            serdeCols.add("  '" + entry.getKey() + "'='" +
+                StringEscapeUtils.escapeJava(entry.getValue()) + "'");
+          }
+          tbl_row_format += StringUtils.join(serdeCols, ", \n");
+          tbl_row_format += ")";
+        }
+      }
+      String tbl_location = "  '" + sd.getLocation() + "'";
+
+      // Table properties
+      String tbl_properties = "";
+      Map<String, String> properties = tbl.getParameters();
+      if (properties.size() > 0) {
+        List<String> realProps = new ArrayList<String>();
+        for (String key : properties.keySet()) {
+          if (properties.get(key) != null && !duplicateProps.contains(key)) {
+            realProps.add("  '" + key + "'='" + properties.get(key) + "'");
+          }
+        }
+        tbl_properties += StringUtils.join(realProps, ", \n");
+      }
+
+      createTab_stmt.setAttribute(Constants.EXTERNAL, tbl_external);
+      createTab_stmt.setAttribute(Constants.LIST_COLUMNS, tbl_columns);
+      createTab_stmt.setAttribute(Constants.TBL_COMMENT, tbl_comment);
+      createTab_stmt.setAttribute(Constants.LIST_PARTITIONS, tbl_partitions);
+      createTab_stmt.setAttribute(Constants.SORT_BUCKET, tbl_sort_bucket);
+      createTab_stmt.setAttribute(Constants.ROW_FORMAT, tbl_row_format);
+      createTab_stmt.setAttribute(Constants.TBL_LOCATION, tbl_location);
+      createTab_stmt.setAttribute(Constants.TBL_PROPERTIES, tbl_properties);
+
+      outStream.writeBytes(createTab_stmt.toString());
+      ((FSDataOutputStream) outStream).close();
+      outStream = null;
+    } catch (FileNotFoundException e) {
+      LOG.info("show create table: " + stringifyException(e));
+      return 1;
+    } catch (IOException e) {
+      LOG.info("show create table: " + stringifyException(e));
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
+    }
+
+    return 0;
+  }
+
+  /**
    * Write a list of indexes to a file.
    *
    * @param db
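The DELIMITED branch above leans on StringEscapeUtils.escapeJava so that control characters survive as parseable literals, which is why LINES TERMINATED BY '\n' is re-emitted as the two characters \ and n while the test's '\045' (octal for '%') prints as a plain '%'. A small stand-alone illustration:

```java
import org.apache.commons.lang.StringEscapeUtils;

/** Stand-alone look at how delimiter characters are re-emitted in the generated DDL. */
public class DelimiterEscapeDemo {
  public static void main(String[] args) {
    // A real newline becomes the two characters \ and n, keeping the DDL on one line.
    System.out.println(StringEscapeUtils.escapeJava("\n")); // prints \n
    // Printable delimiters pass through unchanged, so '\045' from the test shows up as '%'.
    System.out.println(StringEscapeUtils.escapeJava("%"));  // prints %
    System.out.println(StringEscapeUtils.escapeJava("|"));  // prints |
  }
}
```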
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (working copy)
@@ -52,6 +52,7 @@
   private ShowLocksDesc showLocksDesc;
   private DescFunctionDesc descFunctionDesc;
   private ShowPartitionsDesc showPartsDesc;
+  private ShowCreateTableDesc showCreateTblDesc;
   private DescTableDesc descTblDesc;
   private AddPartitionDesc addPartitionDesc;
   private RenamePartitionDesc renamePartitionDesc;
@@ -304,6 +305,16 @@
   }
 
   /**
+   * @param showCreateTblDesc
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      ShowCreateTableDesc showCreateTblDesc) {
+    this(inputs, outputs);
+
+    this.showCreateTblDesc = showCreateTblDesc;
+  }
+
+  /**
    * @param addPartitionDesc
    *          information about the partitions we want to add.
    */
@@ -698,6 +709,22 @@
   }
 
   /**
+   * @return the showCreateTblDesc
+   */
+  @Explain(displayName = "Show Create Table Operator")
+  public ShowCreateTableDesc getShowCreateTblDesc() {
+    return showCreateTblDesc;
+  }
+
+  /**
+   * @param showCreateTblDesc
+   *          the showCreateTblDesc to set
+   */
+  public void setShowCreateTblDesc(ShowCreateTableDesc showCreateTblDesc) {
+    this.showCreateTblDesc = showCreateTblDesc;
+  }
+
+  /**
    * @return the showIndexesDesc
    */
   @Explain(displayName = "Show Index Operator")
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (working copy)
@@ -53,6 +53,7 @@
   SHOWTABLES("SHOWTABLES", null, null),
   SHOW_TABLESTATUS("SHOW_TABLESTATUS", null, null),
   SHOW_TBLPROPERTIES("SHOW_TBLPROPERTIES", null, null),
+  SHOW_CREATETABLE("SHOW_CREATETABLE", new Privilege[]{Privilege.SELECT}, null),
   SHOWFUNCTIONS("SHOWFUNCTIONS", null, null),
   SHOWINDEXES("SHOWINDEXES", null, null),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (working copy)
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+/**
+ * ShowCreateTableDesc.
+ *
+ */
+@Explain(displayName = "Show Create Table")
+public class ShowCreateTableDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  String resFile;
+  String tableName;
+
+  /**
+   * table name for the result of showcreatetable.
+   */
+  private static final String table = "show_create_table";
+  /**
+   * thrift ddl for the result of showcreatetable.
+   */
+  private static final String schema = "createtab_stmt#string";
+
+  public String getTable() {
+    return table;
+  }
+
+  public String getSchema() {
+    return schema;
+  }
+
+  /**
+   * For serialization use only.
+   */
+  public ShowCreateTableDesc() {
+  }
+
+  /**
+   * @param resFile
+   * @param tableName
+   *          name of table to show
+   */
+  public ShowCreateTableDesc(String tableName, String resFile) {
+    this.tableName = tableName;
+    this.resFile = resFile;
+  }
+
+  /**
+   * @return the resFile
+   */
+  @Explain(displayName = "result file", normalExplain = false)
+  public String getResFile() {
+    return resFile;
+  }
+
+  /**
+   * @param resFile
+   *          the resFile to set
+   */
+  public void setResFile(String resFile) {
+    this.resFile = resFile;
+  }
+
+  /**
+   * @return the tableName
+   */
+  @Explain(displayName = "table name")
+  public String getTableName() {
+    return tableName;
+  }
+
+  /**
+   * @param tableName
+   *          the tableName to set
+   */
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -144,6 +144,7 @@
 TOK_SHOWTABLES;
 TOK_SHOWFUNCTIONS;
 TOK_SHOWPARTITIONS;
+TOK_SHOW_CREATETABLE;
 TOK_SHOW_TABLESTATUS;
 TOK_SHOW_TBLPROPERTIES;
 TOK_SHOWLOCKS;
@@ -846,6 +847,7 @@
     | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=Identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?  -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? showStmtIdentifier?)
     | KW_SHOW KW_FUNCTIONS showStmtIdentifier?  -> ^(TOK_SHOWFUNCTIONS showStmtIdentifier?)
     | KW_SHOW KW_PARTITIONS Identifier partitionSpec? -> ^(TOK_SHOWPARTITIONS Identifier partitionSpec?)
+    | KW_SHOW KW_CREATE KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
    | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=Identifier)? KW_LIKE showStmtIdentifier partitionSpec?
    -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
    | KW_SHOW KW_TBLPROPERTIES tblName=Identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
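The new grammar alternative routes `SHOW CREATE TABLE [db.]table` to TOK_SHOW_CREATETABLE. A quick way to sanity-check the rule in isolation is to run the parser directly; a small sketch (the class name is hypothetical, and the printed tree shape is ANTLR's, not part of this patch):

```java
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

/** Parses the new statement form and prints the resulting AST. */
public class ShowCreateTableParseSketch {
  public static void main(String[] args) throws Exception {
    ParseDriver pd = new ParseDriver();
    ASTNode tree = pd.parse("SHOW CREATE TABLE tmp_feng.tmp_showcrt");
    // The dump should show TOK_SHOW_CREATETABLE with the (optionally db-qualified)
    // table name beneath it.
    System.out.println(tree.toStringTree());
  }
}
```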
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy)
@@ -61,6 +61,7 @@
     commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
     commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, HiveOperation.SHOW_TABLESTATUS);
     commandType.put(HiveParser.TOK_SHOW_TBLPROPERTIES, HiveOperation.SHOW_TBLPROPERTIES);
+    commandType.put(HiveParser.TOK_SHOW_CREATETABLE, HiveOperation.SHOW_CREATETABLE);
     commandType.put(HiveParser.TOK_SHOWFUNCTIONS, HiveOperation.SHOWFUNCTIONS);
     commandType.put(HiveParser.TOK_SHOWINDEXES, HiveOperation.SHOWINDEXES);
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS);
@@ -159,6 +160,7 @@
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOW_TABLESTATUS:
       case HiveParser.TOK_SHOW_TBLPROPERTIES:
+      case HiveParser.TOK_SHOW_CREATETABLE:
      case HiveParser.TOK_SHOWFUNCTIONS:
      case HiveParser.TOK_SHOWPARTITIONS:
      case HiveParser.TOK_SHOWINDEXES:
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -98,6 +98,7 @@
 import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
 import org.apache.hadoop.hive.ql.plan.RevokeDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
+import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
@@ -321,6 +322,10 @@
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowPartitions(ast);
       break;
+    case HiveParser.TOK_SHOW_CREATETABLE:
+      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      analyzeShowCreateTable(ast);
+      break;
     case HiveParser.TOK_SHOWINDEXES:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowIndexes(ast);
@@ -1413,6 +1418,27 @@
     setFetchTask(createFetchTask(showPartsDesc.getSchema()));
   }
 
+  private void analyzeShowCreateTable(ASTNode ast) throws SemanticException {
+    ShowCreateTableDesc showCreateTblDesc;
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    showCreateTblDesc = new ShowCreateTableDesc(tableName, ctx.getResFile().toString());
+    try {
+      Table tab = db.getTable(tableName, true);
+      if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
+        throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName +
+            " is INDEX_TABLE"));
+      }
+      inputs.add(new ReadEntity(tab));
+    } catch (SemanticException e) {
+      throw e;
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        showCreateTblDesc), conf));
+    setFetchTask(createFetchTask(showCreateTblDesc.getSchema()));
+  }
+
   private void analyzeShowDatabases(ASTNode ast) throws SemanticException {
     ShowDatabasesDesc showDatabasesDesc;
     if (ast.getChildCount() == 1) {
Index: ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (revision 1360519)
+++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (working copy)
@@ -215,6 +215,7 @@
   UDAF_INVALID_LOCATION(10128, "Not yet supported place for UDAF"),
   DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY(10129,
     "Drop partitions for a non string partition columns is not allowed using non-equality"),
+  SHOW_CREATETABLE_INDEX(10130, "SHOW CREATE TABLE does not support index."),
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
   SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "
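Both negative tests exercise error paths added in this patch: 10001 for a missing table and the new 10130 for index tables, raised in analyzeShowCreateTable. A hedged sketch of hitting those paths without the qfile harness — the exact return code of Driver.compile is version-dependent, and the index table name assumes the index from show_create_table_index.q exists:

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

/** Sketch of the two failure modes covered by the negative tests. */
public class ShowCreateTableErrorSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(SessionState.class);
    SessionState.start(conf);
    Driver driver = new Driver(conf);

    // Surfaces "Error 10001: Table not found tmp_nonexist" during semantic analysis.
    int missing = driver.compile("SHOW CREATE TABLE tmp_nonexist");

    // Surfaces the new "Error 10130: SHOW CREATE TABLE does not support index."
    // (assumes the index table created in show_create_table_index.q exists).
    int index = driver.compile("SHOW CREATE TABLE default__tmp_showcrt_tmp_index__");

    System.out.println(missing != 0 && index != 0); // both compiles are expected to fail
  }
}
```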