diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 6cd1f39..626998c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2544,12 +2544,14 @@ private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
     }
     Table tab = getTable(tblName, true);
     validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION);
-    inputs.add(new ReadEntity(tab));
+    ReadEntity re = new ReadEntity(tab);
+    re.noLockNeeded();
+    inputs.add(re);
 
     List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
     partSpecs.add(oldPartSpec);
     partSpecs.add(newPartSpec);
-    addTablePartsOutputs(tblName, partSpecs);
+    addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_EXCLUSIVE);
     RenamePartitionDesc renamePartitionDesc = new RenamePartitionDesc(
         SessionState.get().getCurrentDatabase(), tblName, oldPartSpec, newPartSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -2828,7 +2830,7 @@ private void analyzeAlterTableTouch(CommonTree ast)
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           touchDesc), conf));
     } else {
-      addTablePartsOutputs(tblName, partSpecs);
+      addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK);
       for (Map<String, String> partSpec : partSpecs) {
         AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
             SessionState.get().getCurrentDatabase(), tblName, partSpec,
@@ -2851,7 +2853,7 @@ private void analyzeAlterTableArchive(CommonTree ast, boolean isUnArchive)
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
     Table tab = getTable(tblName, true);
-    addTablePartsOutputs(tblName, partSpecs, true);
+    addTablePartsOutputs(tblName, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
     validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
     inputs.add(new ReadEntity(tab));
 
@@ -3039,9 +3041,10 @@ private void validatePartitionValues(Map<String, String> partSpec)
    * Add the table partitions to be modified in the output, so that it is available for the
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
-  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs)
+  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
+      WriteEntity.WriteType writeType)
       throws SemanticException {
-    addTablePartsOutputs(tblName, partSpecs, false, false, null);
+    addTablePartsOutputs(tblName, partSpecs, false, false, null, writeType);
   }
 
   /**
@@ -3049,9 +3052,9 @@ private void addTablePartsOutputs(String tblName, List<Map<String, String>> part
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
   private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
-      boolean allowMany)
+      boolean allowMany, WriteEntity.WriteType writeType)
       throws SemanticException {
-    addTablePartsOutputs(tblName, partSpecs, false, allowMany, null);
+    addTablePartsOutputs(tblName, partSpecs, false, allowMany, null, writeType);
   }
 
   /**
@@ -3060,7 +3063,7 @@ private void addTablePartsOutputs(String tblName, List<Map<String, String>> part
   * throwIfNonExistent is true, otherwise ignore it.
   */
  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
-      boolean throwIfNonExistent, boolean allowMany, ASTNode ast)
+      boolean throwIfNonExistent, boolean allowMany, ASTNode ast, WriteEntity.WriteType writeType)
       throws SemanticException {
     Table tab = getTable(tblName);
 
@@ -3096,7 +3099,7 @@ private void addTablePartsOutputs(String tblName, List<Map<String, String>> part
       }
       for (Partition p : parts) {
         // Don't request any locks here, as the table has already been locked.
-        outputs.add(new WriteEntity(p, WriteEntity.WriteType.DDL_NO_LOCK));
+        outputs.add(new WriteEntity(p, writeType));
       }
     }
   }
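Reviewer note, not part of the patch: a minimal sketch of how the reworked overloads are now invoked by the callers touched above, using only the names that appear in the hunks.

    // ALTER TABLE ... RENAME PARTITION records its partition outputs with an
    // exclusive DDL lock, while the table-level ReadEntity is marked noLockNeeded().
    addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_EXCLUSIVE);

    // TOUCH and ARCHIVE/UNARCHIVE keep the previous behaviour of not requesting
    // any additional locks for their partition outputs.
    addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK);
    addTablePartsOutputs(tblName, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);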