Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1138144) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -34,14 +34,13 @@ import java.util.Collections; import java.util.Comparator; import java.util.Date; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; -import java.util.Map.Entry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -97,6 +96,7 @@ import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; @@ -129,7 +129,6 @@ import org.apache.hadoop.hive.ql.plan.ShowTablesDesc; import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.serde.Constants; @@ -162,6 +161,7 @@ private static String INTERMEDIATE_ORIGINAL_DIR_SUFFIX; private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX; + @Override public boolean requireLock() { return this.work != null && this.work.getNeedLock(); } @@ -2926,42 +2926,16 @@ dropTbl.getExpectView()); } - // get all partitions of the table - List<String> partitionNames = - db.getPartitionNames(dropTbl.getTableName(), (short) -1); - Set<Map<String, String>> partitions = new HashSet<Map<String, String>>(); - 
for (String partitionName : partitionNames) { - try { - partitions.add(Warehouse.makeSpecFromName(partitionName)); - } catch (MetaException e) { - LOG.warn("Unrecognized partition name from metastore: " + partitionName); - } - } - // drop partitions in the list List<Partition> partsToDelete = new ArrayList<Partition>(); for (Map<String, String> partSpec : dropTbl.getPartSpecs()) { - Iterator<Map<String, String>> it = partitions.iterator(); - while (it.hasNext()) { - Map<String, String> part = it.next(); - // test if partSpec matches part - boolean match = true; - for (Map.Entry<String, String> item : partSpec.entrySet()) { - if (!item.getValue().equals(part.get(item.getKey()))) { - match = false; - break; - } + List<Partition> partitions = db.getPartitions(tbl, partSpec); + for (Partition p : partitions) { + if (!p.canDrop()) { + throw new HiveException("Table " + tbl.getTableName() + + " Partition " + p.getName() + + " is protected from being dropped"); } - if (match) { - Partition p = db.getPartition(tbl, part, false); - if (!p.canDrop()) { - throw new HiveException("Table " + tbl.getTableName() + - " Partition " + p.getName() + - " is protected from being dropped"); - } - - partsToDelete.add(p); - it.remove(); - } + partsToDelete.add(p); } }