Index: ql/src/test/results/clientpositive/add_multiple_part.q.out
===================================================================
--- ql/src/test/results/clientpositive/add_multiple_part.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/add_multiple_part.q.out	(revision 0)
@@ -0,0 +1,87 @@
+PREHOOK: query: CREATE TABLE add_multi_part(a int, b int) PARTITIONED BY (insertdate string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE add_multi_part(a int, b int) PARTITIONED BY (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@add_multi_part
+PREHOOK: query: SHOW PARTITIONS add_multi_part
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_multi_part
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: ALTER TABLE add_multi_part ADD
+#### A masked pattern was here ####
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_multi_part
+POSTHOOK: query: ALTER TABLE add_multi_part ADD
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_multi_part
+POSTHOOK: Output: default@add_multi_part@insertdate=2012%2F06%2F25
+POSTHOOK: Output: default@add_multi_part@insertdate=2012%2F06%2F26
+PREHOOK: query: DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/25")
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/25")
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+
+a                     	int                 	None                
+b                     	int                 	None                
+
+# Partition Information
+# col_name            	data_type           	comment             
+
+insertdate            	string              	None                
+
+# Detailed Partition Information
+Partition Value:      	[2012/06/25]        
+Database:             	default             
+Table:                	add_multi_part      
+#### A masked pattern was here ####
+Protect Mode:         	None                
+...(omitted_part_of_path_to_escape_pattern_mask).../add_multi_part/2012/06/25
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:        	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:          	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:         	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:           	No                  
+Num Buckets:          	-1                  
+Bucket Columns:       	[]                  
+Sort Columns:         	[]                  
+Storage Desc Params:
+	serialization.format	1                   
+PREHOOK: query: DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/26")
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/26")
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+
+a                     	int                 	None                
+b                     	int                 	None                
+
+# Partition Information
+# col_name            	data_type           	comment             
+
+insertdate            	string              	None                
+
+# Detailed Partition Information
+Partition Value:      	[2012/06/26]        
+Database:             	default             
+Table:                	add_multi_part      
+#### A masked pattern was here ####
+Protect Mode:         	None                
+...(omitted_part_of_path_to_escape_pattern_mask).../add_multi_part/2012/06/26
+Partition Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:        	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:          	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:         	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:           	No                  
+Num Buckets:          	-1                  
+Bucket Columns:       	[]                  
+Sort Columns:         	[]                  
+Storage Desc Params:
+	serialization.format	1                   
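Note on the golden output above: the POSTHOOK: Output lines show the partition value with '/' percent-encoded (insertdate=2012%2F06%2F25), since a literal '/' in a partition value would otherwise be read as a path separator in the partition's directory name. The following is an illustration only, not Hive's actual escaping code; the escape() helper is hypothetical and merely mimics the visible effect:

// Sketch: why insertdate=2012/06/25 appears as insertdate=2012%2F06%2F25 above.
// The escape() helper is hypothetical, not Hive's implementation.
public class PartitionNameEscapeDemo {

  // '/' (and '\') cannot appear literally in a partition directory name.
  private static final String UNSAFE = "/\\";

  static String escape(String value) {
    StringBuilder sb = new StringBuilder();
    for (char c : value.toCharArray()) {
      if (UNSAFE.indexOf(c) >= 0) {
        sb.append(String.format("%%%02X", (int) c)); // '/' -> %2F
      } else {
        sb.append(c);
      }
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // prints: insertdate=2012%2F06%2F25
    System.out.println("insertdate=" + escape("2012/06/25"));
  }
}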
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java	(revision 1353409)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java	(working copy)
@@ -916,6 +916,39 @@
     out.close();
   }
 
+  public void transformOutputForResultChecking(String tname) throws Exception {
+    // transform location information to escape the pattern mask for the add_multiple_part test;
+    // the file rewrite is wrapped in an 'if' so normal test cases do not pay for it
+    if (tname.equals("add_multiple_part.q")) {
+      String fname = new File(logDir, tname + ".out").getPath();
+      String line;
+      BufferedReader in;
+      BufferedWriter out;
+
+      in = new BufferedReader(new FileReader(fname));
+      out = new BufferedWriter(new FileWriter(fname + ".orig1"));
+      while (null != (line = in.readLine())) {
+        out.write(line);
+        out.write('\n');
+      }
+      in.close();
+      out.close();
+
+      // rewrite the Location: line so the masking patterns do not hide it
+      in = new BufferedReader(new FileReader(fname + ".orig1"));
+      out = new BufferedWriter(new FileWriter(fname));
+      while (null != (line = in.readLine())) {
+        line = line.replaceAll("Location:.*warehouse",
+            "...(omitted_part_of_path_to_escape_pattern_mask)...");
+        out.write(line);
+        out.write('\n');
+      }
+      in.close();
+      out.close();
+    }
+  }
+
   public int checkCliDriverResults(String tname) throws Exception {
     String[] cmdArray;
     String[] patterns;
@@ -923,6 +956,8 @@
 
     String outFileName = outPath(outDir, tname + ".out");
 
+    // transform the output if necessary
+    transformOutputForResultChecking(tname);
     patterns = new String[] {
         ".*file:.*",
         ".*pfile:.*",
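For readers who want to try the rewrite outside the test harness, here is a compact standalone equivalent of the transform above, assuming a hypothetical output file in the current directory. It applies the same regex the patch uses; this is a sketch, not part of the patch:

// Sketch: standalone equivalent of transformOutputForResultChecking.
// The file path is hypothetical.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;

public class LocationMaskDemo {
  public static void main(String[] args) throws IOException {
    Path out = Paths.get("add_multiple_part.q.out"); // hypothetical location
    List<String> masked = Files.readAllLines(out, StandardCharsets.UTF_8).stream()
        // Same regex as the patch: everything from "Location:" up to the
        // warehouse root becomes a fixed token, so the per-machine prefix no
        // longer trips the "#### A masked pattern was here ####" mask.
        .map(l -> l.replaceAll("Location:.*warehouse",
            "...(omitted_part_of_path_to_escape_pattern_mask)..."))
        .collect(Collectors.toList());
    Files.write(out, masked, StandardCharsets.UTF_8);
  }
}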
Index: ql/src/test/queries/clientpositive/add_multiple_part.q
===================================================================
--- ql/src/test/queries/clientpositive/add_multiple_part.q	(revision 0)
+++ ql/src/test/queries/clientpositive/add_multiple_part.q	(revision 0)
@@ -0,0 +1,9 @@
+CREATE TABLE add_multi_part(a int, b int) PARTITIONED BY (insertdate string);
+SHOW PARTITIONS add_multi_part;
+
+ALTER TABLE add_multi_part ADD
+  PARTITION (insertdate="2012/06/25") LOCATION '2012/06/25'
+  PARTITION (insertdate="2012/06/26") LOCATION '2012/06/26'
+;
+DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/25");
+DESCRIBE FORMATTED add_multi_part PARTITION(insertdate="2012/06/26");
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 1353409)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -652,8 +652,8 @@
 alterStatementSuffixAddPartitions
 @init { msgs.push("add partition statement"); }
 @after { msgs.pop(); }
-    : Identifier KW_ADD ifNotExists? partitionSpec partitionLocation? (partitionSpec partitionLocation?)*
-    -> ^(TOK_ALTERTABLE_ADDPARTS Identifier ifNotExists? (partitionSpec partitionLocation?)+)
+    : Identifier KW_ADD ifNotExists? partitionSpecWithLocation (partitionSpecWithLocation)*
+    -> ^(TOK_ALTERTABLE_ADDPARTS Identifier ifNotExists? (partitionSpecWithLocation)+)
     ;
 
 alterStatementSuffixTouch
@@ -677,6 +677,13 @@
     -> ^(TOK_ALTERTABLE_UNARCHIVE Identifier (partitionSpec)*)
     ;
 
+partitionSpecWithLocation
+    :
+    KW_PARTITION
+     LPAREN partitionVal (COMMA partitionVal)* RPAREN partitionLocation?
+    -> ^(TOK_PARTSPEC partitionVal+ partitionLocation?)
+    ;
+
 partitionLocation
 @init { msgs.push("partition location"); }
 @after { msgs.pop(); }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 1353409)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -1868,11 +1868,8 @@
     }
 
     // partition name to value
-    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
-    addTablePartsOutputs(tblName, partSpecs);
+    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
 
-    Iterator<Map<String, String>> partIter = partSpecs.iterator();
-
     String currentLocation = null;
     Map<String, String> currentPart = null;
     boolean ifNotExists = false;
@@ -1885,36 +1882,40 @@
       case HiveParser.TOK_IFNOTEXISTS:
         ifNotExists = true;
         break;
       case HiveParser.TOK_PARTSPEC:
-        if (currentPart != null) {
-          validatePartitionValues(currentPart);
-          AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-              db.getCurrentDatabase(), tblName, currentPart,
-              currentLocation, ifNotExists, expectView);
-          partitionDescs.add(addPartitionDesc);
-        }
         // create new partition, set values
         currentLocation = null;
-        currentPart = partIter.next();
+        // getPartitionSpecs is folded in here, with a per-spec location check
+        currentPart = new LinkedHashMap<String, String>();
+        for (int i = 0; i < child.getChildCount(); ++i) {
+          CommonTree partspec_val_or_location = (CommonTree) child.getChild(i);
+          switch (partspec_val_or_location.getToken().getType()) {
+          case HiveParser.TOK_PARTVAL:
+            String val = stripQuotes(partspec_val_or_location.getChild(1).getText());
+            currentPart.put(partspec_val_or_location.getChild(0).getText().toLowerCase(), val);
+            break;
+          case HiveParser.TOK_PARTITIONLOCATION:
+            currentLocation = unescapeSQLString(partspec_val_or_location.getChild(0).getText());
+            break;
+          default:
+            throw new SemanticException("Unknown child: " + partspec_val_or_location);
+          }
+        }
+        // add the partition spec
+        validatePartitionValues(currentPart);
+        partSpecs.add(currentPart);
+        // add the location description
+        AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+            db.getCurrentDatabase(), tblName, currentPart,
+            currentLocation, ifNotExists, expectView);
+        partitionDescs.add(addPartitionDesc);
         break;
-      case HiveParser.TOK_PARTITIONLOCATION:
-        // if location specified, set in partition
-        currentLocation = unescapeSQLString(child.getChild(0).getText());
-        break;
       default:
         throw new SemanticException("Unknown child: " + child);
       }
     }
+    addTablePartsOutputs(tblName, partSpecs);
 
-    // add the last one
-    if (currentPart != null) {
-      validatePartitionValues(currentPart);
-      AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-          db.getCurrentDatabase(), tblName, currentPart,
-          currentLocation, ifNotExists, expectView);
-      partitionDescs.add(addPartitionDesc);
-    }
-
     for (AddPartitionDesc addPartitionDesc : partitionDescs) {
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           addPartitionDesc), conf));
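A note on the analyzer change, restated for clarity: the grammar now nests the optional location under each TOK_PARTSPEC, so the analyzer can emit one AddPartitionDesc per spec in a single pass over the children, and the removed "add the last one" tail handling is no longer needed. The sketch below restates that shape with plain data classes; all names are hypothetical and this is not the patch's code:

// Sketch: one pass, one descriptor per partition spec, each spec carrying
// its own optional location. Names and classes are hypothetical.
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class AddPartsWalkDemo {
  static class PartSpec {
    final Map<String, String> values = new LinkedHashMap<String, String>();
    String location; // null when no LOCATION clause was given
  }

  static class PartitionDesc {
    final Map<String, String> spec;
    final String location;
    PartitionDesc(Map<String, String> spec, String location) {
      this.spec = spec;
      this.location = location;
    }
  }

  // Each spec already knows its location, so no look-behind handling
  // of "the last one" is required, unlike the removed code.
  static List<PartitionDesc> analyze(List<PartSpec> specs) {
    List<PartitionDesc> descs = new ArrayList<PartitionDesc>();
    for (PartSpec s : specs) {
      descs.add(new PartitionDesc(s.values, s.location));
    }
    return descs;
  }

  public static void main(String[] args) {
    PartSpec p1 = new PartSpec();
    p1.values.put("insertdate", "2012/06/25");
    p1.location = "2012/06/25";
    PartSpec p2 = new PartSpec();
    p2.values.put("insertdate", "2012/06/26");
    p2.location = "2012/06/26";
    List<PartSpec> specs = new ArrayList<PartSpec>();
    specs.add(p1);
    specs.add(p2);
    System.out.println(analyze(specs).size() + " partitions to add"); // 2
  }
}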