Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 1352422)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -652,8 +652,8 @@
 alterStatementSuffixAddPartitions
 @init { msgs.push("add partition statement"); }
 @after { msgs.pop(); }
-    : Identifier KW_ADD ifNotExists? partitionSpec partitionLocation? (partitionSpec partitionLocation?)*
-    -> ^(TOK_ALTERTABLE_ADDPARTS Identifier ifNotExists? (partitionSpec partitionLocation?)+)
+    : Identifier KW_ADD ifNotExists? partitionSpecWithLocation (partitionSpecWithLocation)*
+    -> ^(TOK_ALTERTABLE_ADDPARTS Identifier ifNotExists? (partitionSpecWithLocation)+)
     ;
 
 alterStatementSuffixTouch
@@ -677,6 +677,13 @@
     -> ^(TOK_ALTERTABLE_UNARCHIVE Identifier (partitionSpec)*)
     ;
 
+partitionSpecWithLocation
+    :
+    KW_PARTITION
+    LPAREN partitionVal (COMMA partitionVal )* RPAREN partitionLocation? -> ^(TOK_PARTSPEC partitionVal+ partitionLocation? )
+    ;
+
+
 partitionLocation
 @init { msgs.push("partition location"); }
 @after { msgs.pop(); }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 1352422)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -1868,11 +1868,8 @@
     }
 
     // partition name to value
-    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
-    addTablePartsOutputs(tblName, partSpecs);
+    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
 
-    Iterator<Map<String, String>> partIter = partSpecs.iterator();
-
     String currentLocation = null;
     Map<String, String> currentPart = null;
     boolean ifNotExists = false;
@@ -1885,36 +1882,40 @@
       case HiveParser.TOK_IFNOTEXISTS:
        ifNotExists = true;
        break;
-      case HiveParser.TOK_PARTSPEC:
-        if (currentPart != null) {
-          validatePartitionValues(currentPart);
-          AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+      case HiveParser.TOK_PARTSPEC:
+        // create new partition, set values
+        currentLocation = null;
+        //move getPartitionSpecs here and insert location check
+        currentPart = new LinkedHashMap<String, String>();
+        for (int i = 0; i < child.getChildCount(); ++i) {
+          CommonTree partspec_val_or_location = (CommonTree) child.getChild(i);
+          switch(partspec_val_or_location.getToken().getType()){
+          case HiveParser.TOK_PARTVAL:
+            String val = stripQuotes(partspec_val_or_location.getChild(1).getText());
+            currentPart.put(partspec_val_or_location.getChild(0).getText().toLowerCase(), val);
+            break;
+          case HiveParser.TOK_PARTITIONLOCATION:
+            currentLocation = unescapeSQLString(partspec_val_or_location.getChild(0).getText());
+            break;
+          default:
+            throw new SemanticException("Unknown child: " + child);
+          }
+        }
+        //add partSpec
+        validatePartitionValues(currentPart);
+        partSpecs.add(currentPart);
+        //add location description
+        AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
            db.getCurrentDatabase(), tblName, currentPart,
            currentLocation, ifNotExists, expectView);
-          partitionDescs.add(addPartitionDesc);
-        }
-        // create new partition, set values
-        currentLocation = null;
-        currentPart = partIter.next();
+        partitionDescs.add(addPartitionDesc);
        break;
-      case HiveParser.TOK_PARTITIONLOCATION:
-        // if location specified, set in partition
-        currentLocation = unescapeSQLString(child.getChild(0).getText());
-        break;
      default:
        throw new SemanticException("Unknown child: " + child);
-      }
+      }
+      addTablePartsOutputs(tblName, partSpecs);
     }
 
-    // add the last one
-    if (currentPart != null) {
-      validatePartitionValues(currentPart);
-      AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-          db.getCurrentDatabase(), tblName, currentPart,
-          currentLocation, ifNotExists, expectView);
-      partitionDescs.add(addPartitionDesc);
-    }
-
     for (AddPartitionDesc addPartitionDesc : partitionDescs) {
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           addPartitionDesc), conf));
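
A quick way to sanity-check the grammar change (not part of the patch itself) is to run an ALTER TABLE ... ADD PARTITION statement with per-partition LOCATION clauses through Hive's ParseDriver and dump the AST: with this patch applied, each partition should appear as one TOK_PARTSPEC subtree carrying its TOK_PARTVAL children and, when a LOCATION clause was given, a nested TOK_PARTITIONLOCATION child, instead of the location being a sibling of the partition spec. This is only a minimal sketch; the table name and paths below are made-up examples.

    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;

    public class AddPartsParseCheck {
      public static void main(String[] args) throws Exception {
        // Two partitions, each with its own storage location.
        String cmd = "ALTER TABLE page_view ADD IF NOT EXISTS"
            + " PARTITION (dt='2012-06-01') LOCATION '/user/hive/pv/dt=2012-06-01'"
            + " PARTITION (dt='2012-06-02') LOCATION '/user/hive/pv/dt=2012-06-02'";
        // Parse only; no metastore or existing table is needed for this step.
        ASTNode ast = new ParseDriver().parse(cmd);
        // Under TOK_ALTERTABLE_ADDPARTS, expect one TOK_PARTSPEC node per
        // partition, each with an optional TOK_PARTITIONLOCATION child.
        System.out.println(ast.toStringTree());
      }
    }

The same statement also parses before the patch, but the old grammar emits the locations as sibling TOK_PARTITIONLOCATION nodes, which is what forced DDLSemanticAnalyzer to track "the last partition seen" and is removed by the second hunk above.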