Index: ql/src/test/results/clientpositive/alter4.q.out
===================================================================
--- ql/src/test/results/clientpositive/alter4.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/alter4.q.out	(revision 0)
@@ -0,0 +1,36 @@
+PREHOOK: query: DROP TABLE set_bucketing_test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE set_bucketing_test
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE set_bucketing_test (key INT, value STRING) CLUSTERED BY (key) INTO 10 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE set_bucketing_test (key INT, value STRING) CLUSTERED BY (key) INTO 10 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@set_bucketing_test
+PREHOOK: query: DESCRIBE EXTENDED set_bucketing_test
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED set_bucketing_test
+POSTHOOK: type: DESCTABLE
+key	int
+value	string
+
+Detailed Table Information	Table(tableName:set_bucketing_test, dbName:default, owner:pyang, createTime:1277867951, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/pyang/task/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/set_bucketing_test, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:10, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[key], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1277867951}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: ALTER TABLE set_bucketing_test NOT CLUSTERED
+PREHOOK: type: null
+POSTHOOK: query: ALTER TABLE set_bucketing_test NOT CLUSTERED
+POSTHOOK: type: null
+POSTHOOK: Input: default@set_bucketing_test
+POSTHOOK: Output: default@set_bucketing_test
+PREHOOK: query: DESCRIBE EXTENDED set_bucketing_test
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED set_bucketing_test
+POSTHOOK: type: DESCTABLE
+key	int
+value	string
+
+Detailed Table Information	Table(tableName:set_bucketing_test, dbName:default, owner:pyang, createTime:1277867951, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/pyang/task/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/set_bucketing_test, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=pyang, last_modified_time=1277867951, transient_lastDdlTime=1277867951}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: DROP TABLE set_bucketing_test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE set_bucketing_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@set_bucketing_test
Index: ql/src/test/queries/clientpositive/alter4.q
===================================================================
--- ql/src/test/queries/clientpositive/alter4.q	(revision 0)
+++ ql/src/test/queries/clientpositive/alter4.q	(revision 0)
@@ -0,0 +1,9 @@
+DROP TABLE set_bucketing_test;
+
+CREATE TABLE set_bucketing_test (key INT, value STRING) CLUSTERED BY (key) INTO 10 BUCKETS;
+DESCRIBE EXTENDED set_bucketing_test;
+
+ALTER TABLE set_bucketing_test NOT CLUSTERED;
+DESCRIBE EXTENDED set_bucketing_test;
+
+DROP TABLE set_bucketing_test;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 7220)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -1703,9 +1703,24 @@
         Utilities.validateColumnNames(columns, Utilities
             .getColumnNamesFromSortCols(alterTbl.getSortColumns()));
       }
-      tbl.getTTable().getSd().setBucketCols(alterTbl.getBucketColumns());
-      tbl.getTTable().getSd().setNumBuckets(alterTbl.getNumberBuckets());
-      tbl.getTTable().getSd().setSortCols(alterTbl.getSortColumns());
+
+      int numBuckets = -1;
+      ArrayList<String> bucketCols = null;
+      ArrayList<Order> sortCols = null;
+
+      // -1 buckets means to turn off bucketing
+      if (alterTbl.getNumberBuckets() == -1) {
+        bucketCols = new ArrayList<String>();
+        sortCols = new ArrayList<Order>();
+        numBuckets = -1;
+      } else {
+        bucketCols = alterTbl.getBucketColumns();
+        sortCols = alterTbl.getSortColumns();
+        numBuckets = alterTbl.getNumberBuckets();
+      }
+      tbl.getTTable().getSd().setBucketCols(bucketCols);
+      tbl.getTTable().getSd().setNumBuckets(numBuckets);
+      tbl.getTTable().getSd().setSortCols(sortCols);
     } else {
       console.printError("Unsupported Alter commnad");
       return 1;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 7220)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -403,6 +403,9 @@
 @after{msgs.pop();}
     :name=Identifier tableBuckets
     ->^(TOK_ALTERTABLE_CLUSTER_SORT $name tableBuckets)
+    |
+    name=Identifier KW_NOT KW_CLUSTERED
+    ->^(TOK_ALTERTABLE_CLUSTER_SORT $name)
     ;
 
 fileFormat
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 7220)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -269,23 +269,31 @@
   private void analyzeAlterTableClusterSort(ASTNode ast)
       throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
-    ASTNode buckets = (ASTNode) ast.getChild(1);
-    List<String> bucketCols = getColumnNames((ASTNode) buckets.getChild(0));
-    List<Order> sortCols = new ArrayList<Order>();
-    int numBuckets = -1;
-    if (buckets.getChildCount() == 2) {
-      numBuckets = (Integer.valueOf(buckets.getChild(1).getText())).intValue();
+    if (ast.getChildCount() == 1) {
+      // This means that we want to turn off bucketing
+      AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, -1,
+          new ArrayList<String>(), new ArrayList<Order>());
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+          alterTblDesc), conf));
     } else {
-      sortCols = getColumnNamesOrder((ASTNode) buckets.getChild(1));
-      numBuckets = (Integer.valueOf(buckets.getChild(2).getText())).intValue();
+      ASTNode buckets = (ASTNode) ast.getChild(1);
+      List<String> bucketCols = getColumnNames((ASTNode) buckets.getChild(0));
+      List<Order> sortCols = new ArrayList<Order>();
+      int numBuckets = -1;
+      if (buckets.getChildCount() == 2) {
+        numBuckets = (Integer.valueOf(buckets.getChild(1).getText())).intValue();
+      } else {
+        sortCols = getColumnNamesOrder((ASTNode) buckets.getChild(1));
+        numBuckets = (Integer.valueOf(buckets.getChild(2).getText())).intValue();
+      }
+      if (numBuckets <= 0) {
+        throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg());
+      }
+      AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, numBuckets,
+          bucketCols, sortCols);
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+          alterTblDesc), conf));
     }
-    if (numBuckets <= 0) {
-      throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg());
-    }
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, numBuckets,
-        bucketCols, sortCols);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-        alterTblDesc), conf));
   }
 
   static HashMap<String, String> getProps(ASTNode prop) {