diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index c5be822..fc8afc0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -203,7 +203,9 @@ public static WriteType determineAlterTableWriteType(AlterTableDesc.AlterTableTy
       case ALTERBUCKETNUM:
       case ALTERPARTITION:
       case ADDCOLS:
-      case RENAME: return WriteType.DDL_EXCLUSIVE;
+      case RENAME:
+      case TRUNCATE:
+      case MERGEFILES: return WriteType.DDL_EXCLUSIVE;
 
       case ADDPARTITION:
       case ADDSERDEPROPS:
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 46e1c59..d2c4dc0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -975,13 +975,11 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
 
       }
       truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes));
-
       truncateTblDesc.setInputDir(oldTblPartLoc);
-      addInputsOutputsAlterTable(tableName, partSpec);
-
       truncateTblDesc.setLbCtx(lbCtx);
-      addInputsOutputsAlterTable(tableName, partSpec);
+      addInputsOutputsAlterTable(tableName, partSpec, AlterTableTypes.TRUNCATE);
+
       ddlWork.setNeedLock(true);
       TableDesc tblDesc = Utilities.getTableDesc(table);
       // Write the output to temporary directory and move it to the final location at the end
 
@@ -1367,29 +1365,29 @@ private void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
         alterTblDesc), conf));
   }
 
-  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec)
-      throws SemanticException {
-    addInputsOutputsAlterTable(tableName, partSpec, null, false);
+  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
+      AlterTableTypes op) throws SemanticException {
+    addInputsOutputsAlterTable(tableName, partSpec, null, op);
   }
 
   private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
-      AlterTableDesc desc)throws SemanticException {
-    addInputsOutputsAlterTable(tableName, partSpec, desc, false);
+      AlterTableDesc desc) throws SemanticException {
+    addInputsOutputsAlterTable(tableName, partSpec, desc, desc.getOp());
   }
 
   private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
-      AlterTableDesc desc, boolean isCascade) throws SemanticException {
+      AlterTableDesc desc, AlterTableTypes op) throws SemanticException {
+    boolean isCascade = desc != null && desc.getIsCascade();
     boolean alterPartitions = partSpec != null && !partSpec.isEmpty();
     //cascade only occurs at table level then cascade to partition level
     if (isCascade && alterPartitions) {
       throw new SemanticException(
-          ErrorMsg.ALTER_TABLE_PARTITION_CASCADE_NOT_SUPPORTED, desc.getOp().name());
+          ErrorMsg.ALTER_TABLE_PARTITION_CASCADE_NOT_SUPPORTED, op.getName());
     }
 
     Table tab = getTable(tableName, true);
     // Determine the lock type to acquire
-    WriteEntity.WriteType writeType = desc == null ? WriteEntity.WriteType.DDL_EXCLUSIVE :
-        WriteEntity.determineAlterTableWriteType(desc.getOp());
+    WriteEntity.WriteType writeType = WriteEntity.determineAlterTableWriteType(op);
 
     if (!alterPartitions) {
       inputs.add(new ReadEntity(tab));
@@ -1411,14 +1409,12 @@ private void addInputsOutputsAlterTable(String tableName, Map pa
       if (isFullSpec(tab, partSpec)) {
         // Fully specified partition spec
         Partition part = getPartition(tab, partSpec, true);
-        outputs.add(new WriteEntity(part, writeType));
+        outputs.add(new WriteEntity(part, writeType));
       } else {
         // Partial partition spec supplied. Make sure this is allowed.
-        if (desc == null
-            || !AlterTableDesc.doesAlterTableTypeSupportPartialPartitionSpec(desc.getOp())) {
-          String alterTabletype = (desc != null) ? desc.getOp().name() : "";
+        if (!AlterTableDesc.doesAlterTableTypeSupportPartialPartitionSpec(op)) {
           throw new SemanticException(
-              ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, alterTabletype);
+              ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, op.getName());
         } else if (!conf.getBoolVar(HiveConf.ConfVars.DYNAMICPARTITIONING)) {
           throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_DISABLED);
         }
@@ -1430,15 +1426,13 @@ private void addInputsOutputsAlterTable(String tableName, Map pa
     }
 
     if (desc != null) {
-      validateAlterTableType(tab, desc.getOp(), desc.getExpectView());
+      validateAlterTableType(tab, op, desc.getExpectView());
 
       // validate Unset Non Existed Table Properties
-      if (desc.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS &&
-          desc.getIsDropIfExists() == false) {
-        Iterator<String> keyItr = desc.getProps().keySet().iterator();
-        while (keyItr.hasNext()) {
-          String currKey = keyItr.next();
-          if (tab.getTTable().getParameters().containsKey(currKey) == false) {
+      if (op == AlterTableDesc.AlterTableTypes.DROPPROPS && !desc.getIsDropIfExists()) {
+        Map<String, String> tableParams = tab.getTTable().getParameters();
+        for (String currKey : desc.getProps().keySet()) {
+          if (!tableParams.containsKey(currKey)) {
             String errorMsg =
                 "The following property " + currKey +
                 " does not exist in " + tab.getTableName();
@@ -1607,7 +1601,7 @@ private void analyzeAlterTablePartMergeFiles(ASTNode ast,
 
     mergeDesc.setLbCtx(lbCtx);
 
-    addInputsOutputsAlterTable(tableName, partSpec);
+    addInputsOutputsAlterTable(tableName, partSpec, AlterTableTypes.MERGEFILES);
     DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), mergeDesc);
     ddlWork.setNeedLock(true);
     Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork, conf);
@@ -1645,18 +1639,15 @@ private void analyzeAlterTablePartMergeFiles(ASTNode ast,
 
   private void analyzeAlterTableClusterSort(ASTNode ast, String tableName,
       HashMap<String, String> partSpec) throws SemanticException {
-    addInputsOutputsAlterTable(tableName, partSpec);
 
     AlterTableDesc alterTblDesc;
     switch (ast.getChild(0).getType()) {
     case HiveParser.TOK_NOT_CLUSTERED:
       alterTblDesc = new AlterTableDesc(tableName, -1, new ArrayList<String>(),
           new ArrayList<Order>(), partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
       break;
     case HiveParser.TOK_NOT_SORTED:
       alterTblDesc = new AlterTableDesc(tableName, true, partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
       break;
     case HiveParser.TOK_ALTERTABLE_BUCKETS:
       ASTNode buckets = (ASTNode) ast.getChild(0);
@@ -1675,10 +1666,12 @@ private void analyzeAlterTableClusterSort(ASTNode ast, String tableName,
       alterTblDesc = new AlterTableDesc(tableName, numBuckets, bucketCols,
           sortCols, partSpec);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          alterTblDesc), conf));
       break;
+    default:
+      throw new SemanticException("Invalid operation " + ast.getChild(0).getType());
     }
 
+    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
   }
 
   private void analyzeAlterTableCompact(ASTNode ast, String tableName,
@@ -1743,7 +1736,7 @@ static public String getAttemptTableName(Hive db, String qualifiedName, boolean
     // check whether the name starts with table
     // DESCRIBE table
     // DESCRIBE table.column
-    // DECRIBE table column
+    // DESCRIBE table column
     String tableName = qualifiedName.substring(0,
         qualifiedName.indexOf('.') == -1 ? qualifiedName.length() : qualifiedName.indexOf('.'));
 
@@ -2559,7 +2552,7 @@ private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast,
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec,
         unescapeIdentifier(oldColName), unescapeIdentifier(newColName),
        newType, newComment, first, flagCol, isCascade);
-    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc, isCascade);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
 
@@ -2615,7 +2608,7 @@ private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast,
 
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols,
         alterType, isCascade);
-    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc, isCascade);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -3384,7 +3377,7 @@ private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName,
       }
     }
     AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, locations, partSpec);
-    addInputsOutputsAlterTable(tableName, partSpec);
+    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index 681d809..24cf1da 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -54,7 +54,9 @@ ALTERPARTITIONPROTECTMODE("alter partition protect mode"),
     ALTERLOCATION("alter location"), DROPPARTITION("drop partition"),
     RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"),
     ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"),
-    ALTERPARTITION("alter partition"), COMPACT("compact");
+    ALTERPARTITION("alter partition"), COMPACT("compact"),
+    TRUNCATE("truncate"), MERGEFILES("merge files");
+    ;
 
     private final String name;
     private AlterTableTypes(String name) { this.name = name; }
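Note (not part of the patch): the sketch below is a self-contained, simplified illustration of the refactoring pattern this change applies, i.e. threading an explicit AlterTableTypes value from each call site down to the lock-type decision instead of deriving it from a possibly-null AlterTableDesc. All class and method names here are illustrative stand-ins, not the real Hive APIs.

// AlterWriteTypeSketch.java -- minimal, runnable sketch of the op-to-lock-type flow.
import java.util.Map;

public class AlterWriteTypeSketch {

  // Stand-in for AlterTableDesc.AlterTableTypes: each operation carries a display name.
  enum AlterTableTypes {
    RENAME("rename"), ADDCOLS("add columns"), TRUNCATE("truncate"), MERGEFILES("merge files");

    private final String name;
    AlterTableTypes(String name) { this.name = name; }
    public String getName() { return name; }
  }

  // Stand-in for WriteEntity.WriteType.
  enum WriteType { DDL_EXCLUSIVE, DDL_SHARED }

  // Analogous to WriteEntity.determineAlterTableWriteType: every operation,
  // including the newly added TRUNCATE and MERGEFILES, maps to a lock type.
  static WriteType determineAlterTableWriteType(AlterTableTypes op) {
    switch (op) {
      case RENAME:
      case TRUNCATE:
      case MERGEFILES:
        return WriteType.DDL_EXCLUSIVE;
      default:
        return WriteType.DDL_SHARED;
    }
  }

  // Analogous to the new addInputsOutputsAlterTable(tableName, partSpec, op) overload:
  // the caller states the operation, so no "desc == null ? DDL_EXCLUSIVE : ..." fallback is needed.
  static void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
      AlterTableTypes op) {
    WriteType writeType = determineAlterTableWriteType(op);
    System.out.println(tableName + ": " + op.getName() + " acquires " + writeType);
  }

  public static void main(String[] args) {
    // Mirrors the updated call sites in analyzeTruncateTable and analyzeAlterTablePartMergeFiles.
    addInputsOutputsAlterTable("t1", null, AlterTableTypes.TRUNCATE);
    addInputsOutputsAlterTable("t1", null, AlterTableTypes.MERGEFILES);
  }
}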