Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1140399) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -26,7 +26,6 @@ import java.io.Serializable; import java.net.URI; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -34,9 +33,9 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; import java.util.Set; -import java.util.Map.Entry; import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; @@ -59,8 +58,8 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.index.HiveIndex; +import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; -import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -71,7 +70,9 @@ import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; +import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; @@ -106,8 +107,6 @@ import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import 
org.apache.hadoop.hive.ql.plan.UnlockTableDesc; -import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry; import org.apache.hadoop.hive.ql.session.SessionState; @@ -2053,18 +2052,19 @@ Iterator<Map<String, String>> i; int index; for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) { - Map<String, String> partSpec = i.next(); + Map<String, String> partSpec = i.next(); try { - Partition part = db.getPartition(tab, partSpec, false); - if (part == null) { + List<Partition> parts = db.getPartitions(tab, partSpec); + if (parts == null) { continue; } - outputs.add(new WriteEntity(part)); + for (Partition part : parts) { + outputs.add(new WriteEntity(part)); + } } catch (HiveException e) { - // Ignore the error if the partition does not exist - if (throwIfNonExistent) { - throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(index))); - } + if (throwIfNonExistent) { + throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(index))); + } } } }