Index: ql/src/test/results/clientnegative/showcrt_neg.q.out =================================================================== --- ql/src/test/results/clientnegative/showcrt_neg.q.out (revision 0) +++ ql/src/test/results/clientnegative/showcrt_neg.q.out (working copy) @@ -0,0 +1,3 @@ +PREHOOK: query: SHOW CREATETABLE tmp_nonexist +PREHOOK: type: SHOW_CREATETABLE +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask Index: ql/src/test/results/clientpositive/showcrt.q.out =================================================================== --- ql/src/test/results/clientpositive/showcrt.q.out (revision 0) +++ ql/src/test/results/clientpositive/showcrt.q.out (working copy) @@ -0,0 +1,294 @@ +PREHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n' +STORED AS textfile +#### A masked pattern was here #### +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n' +STORED AS textfile +#### A masked pattern was here #### +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE TABLE tmp_showcrt1 ( + key int, + value string, + newvalue bigint) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' + COLLECTION ITEMS TERMINATED BY '|' + MAP KEYS TERMINATED BY '%' + LINES TERMINATED BY '\n' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here 
#### +PREHOOK: query: DROP TABLE tmp_showcrt1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: DROP TABLE tmp_showcrt1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float) +CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float) +CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE EXTERNAL TABLE tmp_showcrt1 ( + key smallint, + value float) +CLUSTERED BY ( + key) +SORTED BY ( + value DESC) +INTO 5 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE TABLE tmp_showcrt1 ( + key smallint, + value float) +COMMENT 'temporary table' 
+CLUSTERED BY ( + key) +SORTED BY ( + value DESC) +INTO 5 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( + 'EXTERNAL'='FALSE', +#### A masked pattern was here #### +PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE EXTERNAL TABLE tmp_showcrt1 ( + key smallint, + value float) +COMMENT 'changed comment' +CLUSTERED BY ( + key) +SORTED BY ( + value DESC) +INTO 5 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: 
SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE EXTERNAL TABLE tmp_showcrt1 ( + key smallint, + value float) +COMMENT 'changed comment' +CLUSTERED BY ( + key) +SORTED BY ( + value DESC) +INTO 5 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE EXTERNAL TABLE tmp_showcrt1 ( + key smallint, + value float) +COMMENT 'changed comment' +CLUSTERED BY ( + key) +SORTED BY ( + value DESC) +INTO 5 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED BY + 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' +WITH SERDEPROPERTIES ( + 'serialization.format'='1') +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: DROP TABLE tmp_showcrt1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: DROP TABLE tmp_showcrt1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: 
default@tmp_showcrt1 +PREHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +COMMENT 'temporary table' +ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' +OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +COMMENT 'temporary table' +ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' +OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE TABLE tmp_showcrt1 ( + key int, + value string, + newvalue bigint) +COMMENT 'temporary table' +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: DROP TABLE tmp_showcrt1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: DROP TABLE tmp_showcrt1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value') +COMMENT 'temporary table' +PARTITIONED BY (value bigint COMMENT 'some value') +CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS +ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED BY 
'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' +WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$') +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value') +COMMENT 'temporary table' +PARTITIONED BY (value bigint COMMENT 'some value') +CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS +ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' +WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tmp_showcrt1 +PREHOOK: query: SHOW CREATETABLE tmp_showcrt1 +PREHOOK: type: SHOW_CREATETABLE +POSTHOOK: query: SHOW CREATETABLE tmp_showcrt1 +POSTHOOK: type: SHOW_CREATETABLE +CREATE EXTERNAL TABLE tmp_showcrt1 ( + key string, + newvalue boolean COMMENT 'a new value') +COMMENT 'temporary table' +PARTITIONED BY ( + value bigint COMMENT 'some value') +CLUSTERED BY ( + key) +SORTED BY ( + key ASC, + newvalue DESC) +INTO 10 BUCKETS +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED BY + 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' +WITH SERDEPROPERTIES ( + 'serialization.format'='$', + 'field.delim'=',') +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: DROP TABLE tmp_showcrt1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@tmp_showcrt1 +PREHOOK: Output: default@tmp_showcrt1 +POSTHOOK: query: DROP TABLE tmp_showcrt1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@tmp_showcrt1 +POSTHOOK: Output: default@tmp_showcrt1 Index: ql/src/test/queries/clientnegative/showcrt_neg.q =================================================================== --- ql/src/test/queries/clientnegative/showcrt_neg.q (revision 0) +++ ql/src/test/queries/clientnegative/showcrt_neg.q (working copy) @@ -0,0 +1,2 
@@ +SHOW CREATETABLE tmp_nonexist; + Index: ql/src/test/queries/clientpositive/showcrt.q =================================================================== --- ql/src/test/queries/clientpositive/showcrt.q (revision 0) +++ ql/src/test/queries/clientpositive/showcrt.q (working copy) @@ -0,0 +1,40 @@ +CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n' +STORED AS textfile +LOCATION 'file:${system:test.tmp.dir}/tmp_showcrt1'; +SHOW CREATETABLE tmp_showcrt1; +DROP TABLE tmp_showcrt1; + + +CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float) +CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS; +SHOW CREATETABLE tmp_showcrt1; +ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE'); +SHOW CREATETABLE tmp_showcrt1; +ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE'); +SHOW CREATETABLE tmp_showcrt1; +ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE'); +SHOW CREATETABLE tmp_showcrt1; +ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'); +SHOW CREATETABLE tmp_showcrt1; +DROP TABLE tmp_showcrt1; + + +CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint) +COMMENT 'temporary table' +ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' +OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'; +SHOW CREATETABLE tmp_showcrt1; +DROP TABLE tmp_showcrt1; + + +CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value') +COMMENT 'temporary table' +PARTITIONED BY (value bigint COMMENT 'some value') +CLUSTERED BY (key) SORTED BY (key ASC, newvalue DESC) INTO 10 BUCKETS +ROW FORMAT SERDE 
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' +STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' +WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$'); +SHOW CREATETABLE tmp_showcrt1; +DROP TABLE tmp_showcrt1; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -44,6 +44,8 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.apache.commons.lang.StringEscapeUtils; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; @@ -72,6 +74,8 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; @@ -99,6 +103,7 @@ import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataFormatter; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; @@ -128,6 +133,7 @@ import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc; import org.apache.hadoop.hive.ql.plan.RevokeDesc; import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; +import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc; import 
org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; @@ -359,6 +365,11 @@ return showPartitions(db, showParts); } + ShowCreateTableDesc showCreateTbl = work.getShowCreateTblDesc(); + if (showCreateTbl != null) { + return showCreateTable(db, showCreateTbl); + } + RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc(); if (roleDDLDesc != null) { return roleDDL(roleDDLDesc); @@ -1876,6 +1887,196 @@ } /** + * Write a statement of how to create a table to a file. + * + * @param db + * The database in question. + * @param showCreateTbl + * This is the table we're interested in. + * @return Returns 0 when execution succeeds and above 0 if it fails. + * @throws HiveException + * Throws this exception if an unexpected error occurs. + */ + private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws HiveException { + // get the create table statement for the table and populate the output + String tableName = showCreateTbl.getTableName(); + Table tbl = db.getTable(tableName, false); + DataOutput outStream = null; + List duplicateProps = new ArrayList(); + try { + Path resFile = new Path(showCreateTbl.getResFile()); + FileSystem fs = resFile.getFileSystem(conf); + outStream = fs.create(resFile); + + if (tbl == null) { + outStream.writeBytes("Table " + tableName + " does not exist\n"); + ((FSDataOutputStream) outStream).close(); + outStream = null; + return 1; + } + + StringBuilder createTab = new StringBuilder(); + createTab.append("CREATE "); + // For cases where the table is external + if (tbl.getTableType() == TableType.EXTERNAL_TABLE) { + duplicateProps.add("EXTERNAL"); + createTab.append("EXTERNAL "); + } + createTab.append("TABLE " + tableName + " ( \n"); + + // Columns + List cols = tbl.getCols(); + List columns = new ArrayList(); + for (FieldSchema col : cols) { + String columnDesc = " " + col.getName() + " " + col.getType(); + if (col.getComment() != 
null) { + columnDesc = columnDesc + " COMMENT '" + col.getComment() + "'"; + } + columns.add(columnDesc); + } + createTab.append(StringUtils.join(columns, ", \n")); + createTab.append(") \n"); + + // Table comment + String tabComment = tbl.getProperty("comment"); + if (tabComment != null) { + duplicateProps.add("comment"); + createTab.append("COMMENT '" + tabComment + "' \n"); + } + + // Partitions + List partKeys = tbl.getPartitionKeys(); + if (partKeys.size() > 0) { + createTab.append("PARTITIONED BY ( \n"); + List partCols = new ArrayList(); + for (FieldSchema partKey : partKeys) { + String partColDesc = " " + partKey.getName() + " " + partKey.getType(); + if (partKey.getComment() != null) { + partColDesc = partColDesc + " COMMENT '" + partKey.getComment() + "'"; + } + partCols.add(partColDesc); + } + createTab.append(StringUtils.join(partCols, ", \n")); + createTab.append(") \n"); + } + + // Clusters (Buckets) + List buckCols = tbl.getBucketCols(); + if (buckCols.size() > 0) { + duplicateProps.add("SORTBUCKETCOLSPREFIX"); + createTab.append("CLUSTERED BY ( \n "); + createTab.append(StringUtils.join(buckCols, ", \n ")); + createTab.append(") \n"); + List sortCols = tbl.getSortCols(); + if (sortCols.size() > 0) { + createTab.append("SORTED BY ( \n"); + // Order + List sortKeys = new ArrayList(); + for (Order sortCol : sortCols) { + String sortKeyDesc = " " + sortCol.getCol() + " "; + if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) { + sortKeyDesc = sortKeyDesc + "ASC"; + } + else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) { + sortKeyDesc = sortKeyDesc + "DESC"; + } + sortKeys.add(sortKeyDesc); + } + createTab.append(StringUtils.join(sortKeys, ", \n")); + createTab.append(") \n"); + } + // Use the table's actual bucket count, not the number of bucket columns. + createTab.append("INTO " + tbl.getNumBuckets() + " BUCKETS \n"); + } + + // Row format (SerDe) + StorageDescriptor sd = tbl.getTTable().getSd(); + SerDeInfo serdeInfo = sd.getSerdeInfo(); + createTab.append("ROW FORMAT"); + if
(tbl.getStorageHandler() == null) { + if (serdeInfo.getParametersSize() > 1) { + // There is a "serialization.format" property by default, + // even with a delimited row format. + // But our result will only cover the following four delimiters. + createTab.append(" DELIMITED \n"); + Map delims = serdeInfo.getParameters(); + // Warn: + // If the four delimiters all exist in a CREATE TABLE query, + // this following order needs to be strictly followed, + // or the query will fail with a ParseException. + if (delims.containsKey("field.delim")) { + createTab.append(" FIELDS TERMINATED BY '" + + StringEscapeUtils.escapeJava(delims.get("field.delim")) + "' \n"); + } + if (delims.containsKey("colelction.delim")) { + createTab.append(" COLLECTION ITEMS TERMINATED BY '" + + StringEscapeUtils.escapeJava(delims.get("colelction.delim")) + "' \n"); + } + if (delims.containsKey("mapkey.delim")) { + createTab.append(" MAP KEYS TERMINATED BY '" + + StringEscapeUtils.escapeJava(delims.get("mapkey.delim")) + "' \n"); + } + if (delims.containsKey("line.delim")) { + createTab.append(" LINES TERMINATED BY '" + + StringEscapeUtils.escapeJava(delims.get("line.delim")) + "' \n"); + } + } + else { + createTab.append(" SERDE \n '" + serdeInfo.getSerializationLib() + "' \n"); + } + createTab.append("STORED AS INPUTFORMAT \n '" + sd.getInputFormat() + "' \n"); + createTab.append("OUTPUTFORMAT \n '" + sd.getOutputFormat() + "' \n"); + } + else { + duplicateProps.add("storage_handler"); + createTab.append(" SERDE \n '" + serdeInfo.getSerializationLib() + "' \n"); + createTab.append("STORED BY \n '" + tbl.getParameters().get("storage_handler") + "' \n"); + // SerDe Properties + if (serdeInfo.getParametersSize() > 0) { + createTab.append("WITH SERDEPROPERTIES ( \n"); + List serdeCols = new ArrayList(); + for (Map.Entry entry : serdeInfo.getParameters().entrySet()) { + serdeCols.add(" '" + entry.getKey() + "'='" + + StringEscapeUtils.escapeJava(entry.getValue()) + "'"); + } + 
createTab.append(StringUtils.join(serdeCols, ", \n")); + createTab.append(") \n"); + } + } + createTab.append("LOCATION \n '" + sd.getLocation() + "' \n"); + + // Table properties + createTab.append("TBLPROPERTIES ( \n"); + Map properties = tbl.getParameters(); + if (properties.size() > 0) { + List realProps = new ArrayList(); + for (String key : properties.keySet()) { + if (properties.get(key) != null && !duplicateProps.contains(key)) { + realProps.add(" '" + key + "'='" + properties.get(key) + "'"); + } + } + createTab.append(StringUtils.join(realProps, ", \n")); + createTab.append(") \n"); + } + + outStream.writeBytes(createTab.toString()); + ((FSDataOutputStream) outStream).close(); + outStream = null; + } catch (FileNotFoundException e) { + LOG.info("show create table: " + stringifyException(e)); + return 1; + } catch (IOException e) { + LOG.info("show create table: " + stringifyException(e)); + return 1; + } catch (Exception e) { + throw new HiveException(e); + } finally { + IOUtils.closeStream((FSDataOutputStream) outStream); + } + + return 0; + } + + /** * Write a list of indexes to a file. 
* * @param db Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (working copy) @@ -52,6 +52,7 @@ private ShowLocksDesc showLocksDesc; private DescFunctionDesc descFunctionDesc; private ShowPartitionsDesc showPartsDesc; + private ShowCreateTableDesc showCreateTblDesc; private DescTableDesc descTblDesc; private AddPartitionDesc addPartitionDesc; private RenamePartitionDesc renamePartitionDesc; @@ -304,6 +305,16 @@ } /** + * @param showCreateTblDesc + */ + public DDLWork(HashSet inputs, HashSet outputs, + ShowCreateTableDesc showCreateTblDesc) { + this(inputs, outputs); + + this.showCreateTblDesc = showCreateTblDesc; + } + + /** * @param addPartitionDesc * information about the partitions we want to add. */ @@ -698,6 +709,22 @@ } /** + * @return the showCreateTblDesc + */ + @Explain(displayName = "Show Create Table Operator") + public ShowCreateTableDesc getShowCreateTblDesc() { + return showCreateTblDesc; + } + + /** + * @param showCreateTblDesc + * the showCreateTblDesc to set + */ + public void setShowCreateTblDesc(ShowCreateTableDesc showCreateTblDesc) { + this.showCreateTblDesc = showCreateTblDesc; + } + + /** * @return the showIndexesDesc */ @Explain(displayName = "Show Index Operator") Index: ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (working copy) @@ -53,6 +53,7 @@ SHOWTABLES("SHOWTABLES", null, null), SHOW_TABLESTATUS("SHOW_TABLESTATUS", null, null), SHOW_TBLPROPERTIES("SHOW_TBLPROPERTIES", null, null), + SHOW_CREATETABLE("SHOW_CREATETABLE", null, null), SHOWFUNCTIONS("SHOWFUNCTIONS", null, null), 
SHOWINDEXES("SHOWINDEXES", null, null), SHOWPARTITIONS("SHOWPARTITIONS", null, null), Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (working copy) @@ -0,0 +1,97 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +/** + * ShowCreateTableDesc. + * + */ +@Explain(displayName = "Show Create Table") +public class ShowCreateTableDesc extends DDLDesc implements Serializable { + private static final long serialVersionUID = 1L; + String resFile; + String tableName; + + /** + * table name for the result of showcreatetable. + */ + private static final String table = "show_createtable"; + /** + * thrift ddl for the result of showcreatetable. + */ + private static final String schema = "createtab_cli#string"; + + public String getTable() { + return table; + } + + public String getSchema() { + return schema; + } + + /** + * For serialization use only. 
+ */ + public ShowCreateTableDesc() { + } + + /** + * @param resFile + * @param tableName + * name of table to show + */ + public ShowCreateTableDesc(String tableName, String resFile) { + this.tableName = tableName; + this.resFile = resFile; + } + + /** + * @return the resFile + */ + @Explain(displayName = "result file", normalExplain = false) + public String getResFile() { + return resFile; + } + + /** + * @param resFile + * the resFile to set + */ + public void setResFile(String resFile) { + this.resFile = resFile; + } + + /** + * @return the tableName + */ + @Explain(displayName = "table name") + public String getTableName() { + return tableName; + } + + /** + * @param tableName + * the tableName to set + */ + public void setTableName(String tableName) { + this.tableName = tableName; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (working copy) @@ -83,6 +83,7 @@ xlateMap.put("KW_TABLE", "TABLE"); xlateMap.put("KW_TABLES", "TABLES"); xlateMap.put("KW_TBLPROPERTIES", "TBLPROPERTIES"); + xlateMap.put("KW_CREATETABLE", "CREATETABLE"); xlateMap.put("KW_SHOW", "SHOW"); xlateMap.put("KW_MSCK", "MSCK"); xlateMap.put("KW_DIRECTORY", "DIRECTORY"); Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy) @@ -144,6 +144,7 @@ TOK_SHOWTABLES; TOK_SHOWFUNCTIONS; TOK_SHOWPARTITIONS; +TOK_SHOW_CREATETABLE; TOK_SHOW_TABLESTATUS; TOK_SHOW_TBLPROPERTIES; TOK_SHOWLOCKS; @@ -846,6 +847,7 @@ | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=Identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)? 
-> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? showStmtIdentifier?) | KW_SHOW KW_FUNCTIONS showStmtIdentifier? -> ^(TOK_SHOWFUNCTIONS showStmtIdentifier?) | KW_SHOW KW_PARTITIONS Identifier partitionSpec? -> ^(TOK_SHOWPARTITIONS Identifier partitionSpec?) + | KW_SHOW KW_CREATETABLE tblName=Identifier -> ^(TOK_SHOW_CREATETABLE $tblName) | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=Identifier)? KW_LIKE showStmtIdentifier partitionSpec? -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?) | KW_SHOW KW_TBLPROPERTIES tblName=Identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?) @@ -2296,6 +2298,7 @@ KW_LIMIT: 'LIMIT'; KW_SET: 'SET'; KW_TBLPROPERTIES: 'TBLPROPERTIES'; +KW_CREATETABLE: 'CREATETABLE'; KW_IDXPROPERTIES: 'IDXPROPERTIES'; KW_VALUE_TYPE: '$VALUE$'; KW_ELEM_TYPE: '$ELEM$'; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy) @@ -61,6 +61,7 @@ commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES); commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, HiveOperation.SHOW_TABLESTATUS); commandType.put(HiveParser.TOK_SHOW_TBLPROPERTIES, HiveOperation.SHOW_TBLPROPERTIES); + commandType.put(HiveParser.TOK_SHOW_CREATETABLE, HiveOperation.SHOW_CREATETABLE); commandType.put(HiveParser.TOK_SHOWFUNCTIONS, HiveOperation.SHOWFUNCTIONS); commandType.put(HiveParser.TOK_SHOWINDEXES, HiveOperation.SHOWINDEXES); commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS); @@ -159,6 +160,7 @@ case HiveParser.TOK_SHOWTABLES: case HiveParser.TOK_SHOW_TABLESTATUS: case HiveParser.TOK_SHOW_TBLPROPERTIES: + case HiveParser.TOK_SHOW_CREATETABLE: case HiveParser.TOK_SHOWFUNCTIONS: case 
HiveParser.TOK_SHOWPARTITIONS: case HiveParser.TOK_SHOWINDEXES: Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1360519) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -98,6 +98,7 @@ import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc; import org.apache.hadoop.hive.ql.plan.RevokeDesc; import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; +import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc; import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; @@ -321,6 +322,10 @@ ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowPartitions(ast); break; + case HiveParser.TOK_SHOW_CREATETABLE: + ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); + analyzeShowCreateTable(ast); + break; case HiveParser.TOK_SHOWINDEXES: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowIndexes(ast); @@ -1413,6 +1418,15 @@ setFetchTask(createFetchTask(showPartsDesc.getSchema())); } + private void analyzeShowCreateTable(ASTNode ast) throws SemanticException { + ShowCreateTableDesc showCreateTblDesc; + String tableName = getUnescapedName((ASTNode)ast.getChild(0)); + showCreateTblDesc = new ShowCreateTableDesc(tableName, ctx.getResFile().toString()); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + showCreateTblDesc), conf)); + setFetchTask(createFetchTask(showCreateTblDesc.getSchema())); + } + private void analyzeShowDatabases(ASTNode ast) throws SemanticException { ShowDatabasesDesc showDatabasesDesc; if (ast.getChildCount() == 1) {