diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index c060d74..9b3ba46 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -973,13 +973,11 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
       }
       truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes));
-      truncateTblDesc.setInputDir(oldTblPartLoc);
-      addInputsOutputsAlterTable(tableName, partSpec);
-      truncateTblDesc.setLbCtx(lbCtx);
-      addInputsOutputsAlterTable(tableName, partSpec);
+      addInputsOutputsAlterTable(tableName, partSpec, AlterTableTypes.TRUNCATE);
+      ddlWork.setNeedLock(true);
       TableDesc tblDesc = Utilities.getTableDesc(table);
       // Write the output to temporary directory and move it to the final location at the end
@@ -1365,13 +1363,18 @@ private void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
         alterTblDesc), conf));
   }
 
-  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec)
-      throws SemanticException {
-    addInputsOutputsAlterTable(tableName, partSpec, null);
+  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
+      AlterTableTypes op) throws SemanticException {
+    addInputsOutputsAlterTable(tableName, partSpec, null, op.getName());
+  }
+
+  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
+      AlterTableDesc desc) throws SemanticException {
+    addInputsOutputsAlterTable(tableName, partSpec, desc, desc.getOp().getName());
   }
 
   private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
-      AlterTableDesc desc) throws SemanticException {
+      AlterTableDesc desc, String op) throws SemanticException {
     Table tab = getTable(tableName, true);
     // Determine the lock type to acquire
     WriteEntity.WriteType writeType = desc == null ? WriteEntity.WriteType.DDL_EXCLUSIVE :
@@ -1391,13 +1394,13 @@ private void addInputsOutputsAlterTable(String tableName, Map pa
     if (isFullSpec(tab, partSpec)) {
       // Fully specified partition spec
       Partition part = getPartition(tab, partSpec, true);
-      outputs.add(new WriteEntity(part, writeType));
+      outputs.add(new WriteEntity(part, writeType));
     } else {
       // Partial partition spec supplied. Make sure this is allowed.
       if (desc == null || !AlterTableDesc.doesAlterTableTypeSupportPartialPartitionSpec(desc.getOp())) {
         throw new SemanticException(
-            ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, desc.getOp().name());
+            ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, op);
       } else if (!conf.getBoolVar(HiveConf.ConfVars.DYNAMICPARTITIONING)) {
         throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_DISABLED);
       }
@@ -1586,7 +1589,7 @@ private void analyzeAlterTablePartMergeFiles(ASTNode ast,
       mergeDesc.setLbCtx(lbCtx);
-      addInputsOutputsAlterTable(tableName, partSpec);
+      addInputsOutputsAlterTable(tableName, partSpec, AlterTableTypes.MERGEFILES);
       DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), mergeDesc);
       ddlWork.setNeedLock(true);
       Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork, conf);
@@ -1624,18 +1627,15 @@ private void analyzeAlterTablePartMergeFiles(ASTNode ast,
   private void analyzeAlterTableClusterSort(ASTNode ast, String tableName,
       HashMap<String, String> partSpec) throws SemanticException {
-    addInputsOutputsAlterTable(tableName, partSpec);
 
     AlterTableDesc alterTblDesc;
     switch (ast.getChild(0).getType()) {
     case HiveParser.TOK_NOT_CLUSTERED:
       alterTblDesc = new AlterTableDesc(tableName, -1, new ArrayList<String>(), new ArrayList<Order>(), partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
       break;
     case HiveParser.TOK_NOT_SORTED:
       alterTblDesc = new AlterTableDesc(tableName, true, partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
       break;
     case HiveParser.TOK_ALTERTABLE_BUCKETS:
       ASTNode buckets = (ASTNode) ast.getChild(0);
@@ -1651,13 +1651,14 @@ private void analyzeAlterTableClusterSort(ASTNode ast, String tableName,
       if (numBuckets <= 0) {
         throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg());
       }
-
       alterTblDesc = new AlterTableDesc(tableName, numBuckets, bucketCols, sortCols, partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          alterTblDesc), conf));
       break;
+    default:
+      throw new SemanticException("Invalid operation " + ast.getChild(0).getType());
     }
+    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
   }
 
   private void analyzeAlterTableCompact(ASTNode ast, String tableName,
@@ -3354,7 +3355,7 @@ private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName,
       }
     }
     AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, locations, partSpec);
-    addInputsOutputsAlterTable(tableName, partSpec);
+    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index f869821..4fcf1a2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -54,7 +54,9 @@
     ALTERPARTITIONPROTECTMODE("alter partition protect mode"), ALTERLOCATION("alter location"),
     DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"),
     ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"),
-    ALTERPARTITION("alter partition"), COMPACT("compact");
+    ALTERPARTITION("alter partition"), COMPACT("compact"),
+    TRUNCATE("truncate"), MERGEFILES("merge files");
+    ;
 
     private final String name;
     private AlterTableTypes(String name) { this.name = name; }
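
Note on the refactoring above: the old single entry point addInputsOutputsAlterTable(tableName, partSpec) is replaced by two overloads that both funnel into one worker carrying the operation name as a String. That lets the partial-partition-spec error name the operation even when no AlterTableDesc exists (TRUNCATE and MERGEFILES pass only the AlterTableTypes constant), whereas the old message argument desc.getOp().name() cannot be evaluated when desc is null. The sketch below is a minimal, self-contained illustration of that dispatch pattern only; the class names, the partial-spec check, and the exception type are simplified stand-ins, not the real DDLSemanticAnalyzer code, which also registers ReadEntity/WriteEntity objects and consults HiveConf.

// Standalone sketch (illustration only, not Hive's actual DDLSemanticAnalyzer).
// Two convenience overloads feed a single worker that always knows the operation
// name, so error reporting works even when no descriptor is available.
import java.util.Collections;
import java.util.Map;

public class AlterDispatchSketch {

  // Stand-in for AlterTableDesc.AlterTableTypes.
  enum AlterTableTypes {
    TRUNCATE("truncate"), MERGEFILES("merge files");
    private final String name;
    AlterTableTypes(String name) { this.name = name; }
    public String getName() { return name; }
  }

  // Stand-in for AlterTableDesc; only the op matters here.
  static class AlterTableDesc {
    private final AlterTableTypes op;
    AlterTableDesc(AlterTableTypes op) { this.op = op; }
    AlterTableTypes getOp() { return op; }
  }

  // Callers with no descriptor (e.g. truncate, merge files) pass the op constant.
  static void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
      AlterTableTypes op) {
    addInputsOutputsAlterTable(tableName, partSpec, null, op.getName());
  }

  // Callers that already built a descriptor let it supply the op.
  static void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
      AlterTableDesc desc) {
    addInputsOutputsAlterTable(tableName, partSpec, desc, desc.getOp().getName());
  }

  // Worker: names the operation in errors without touching desc, which may be null.
  static void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
      AlterTableDesc desc, String op) {
    boolean partialSpec = partSpec != null && partSpec.containsValue(null); // simplified check
    if (partialSpec && desc == null) {
      throw new IllegalStateException(
          "Operation " + op + " does not support a partial partition spec on " + tableName);
    }
    System.out.println("alter " + tableName + ": op=" + op + ", hasDesc=" + (desc != null));
  }

  public static void main(String[] args) {
    addInputsOutputsAlterTable("t1", Collections.<String, String>emptyMap(),
        AlterTableTypes.TRUNCATE);
    addInputsOutputsAlterTable("t2", Collections.<String, String>emptyMap(),
        new AlterTableDesc(AlterTableTypes.MERGEFILES));
  }
}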