diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 4b7fc73..a0781e2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -749,7 +749,7 @@ public tableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit
       ASTNode partspec = (ASTNode) ast.getChild(1);
       partitions = new ArrayList<Partition>();
       // partSpec is a mapping from partition column name to its value.
-      partSpec = new LinkedHashMap<String, String>(partspec.getChildCount());
+      Map<String, String> tmpPartSpec = new HashMap<String, String>(partspec.getChildCount());
       for (int i = 0; i < partspec.getChildCount(); ++i) {
         ASTNode partspec_val = (ASTNode) partspec.getChild(i);
         String val = null;
@@ -764,15 +764,21 @@
         } else { // in the form of T partition (ds="2010-03-03")
           val = stripQuotes(partspec_val.getChild(1).getText());
         }
-        tmpPartSpec.put(colName, val);
+        tmpPartSpec.put(colName, val);
      }

       // check if the columns, as well as value types in the partition() clause are valid
-      validatePartSpec(tableHandle, partSpec, ast, conf);
+      validatePartSpec(tableHandle, tmpPartSpec, ast, conf);
+
+      List<FieldSchema> parts = tableHandle.getPartitionKeys();
+      partSpec = new LinkedHashMap<String, String>(partspec.getChildCount());
+      for (FieldSchema fs : parts) {
+        String partKey = fs.getName();
+        partSpec.put(partKey, tmpPartSpec.get(partKey));
+      }

       // check if the partition spec is valid
       if (numDynParts > 0) {
-        List<FieldSchema> parts = tableHandle.getPartitionKeys();
         int numStaPart = parts.size() - numDynParts;
         if (numStaPart == 0 &&
             conf.getVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE).equalsIgnoreCase("strict")) {
diff --git ql/src/test/queries/clientpositive/loadpart2.q ql/src/test/queries/clientpositive/loadpart2.q
new file mode 100644
index 0000000..a252eaa
--- /dev/null
+++ ql/src/test/queries/clientpositive/loadpart2.q
@@ -0,0 +1,9 @@
+
+create table hive_test ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as textfile;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol1='part1',pcol2='part1') ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol2='part2',pcol1='part2') ;
+select * from hive_test where pcol1='part1' and pcol2='part1';
+select * from hive_test where pcol1='part2' and pcol2='part2';
+
+
+
diff --git ql/src/test/results/clientnegative/dyn_part4.q.out ql/src/test/results/clientnegative/dyn_part4.q.out
index 43f1e4d..862e1b5 100644
--- ql/src/test/results/clientnegative/dyn_part4.q.out
+++ ql/src/test/results/clientnegative/dyn_part4.q.out
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table nzhang_part4 (key string) partitioned by (ds string, hr string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@nzhang_part4
-FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, value), while the partitions specified in the query are: (value, ds, hr).
+FAILED: SemanticException [Error 10094]: Line 3:46 Dynamic partition cannot be the parent of a static partition 'hr'
diff --git ql/src/test/results/clientpositive/loadpart2.q.out ql/src/test/results/clientpositive/loadpart2.q.out
new file mode 100644
index 0000000..ba89d5b
--- /dev/null
+++ ql/src/test/results/clientpositive/loadpart2.q.out
@@ -0,0 +1,51 @@
+PREHOOK: query: create table hive_test ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_test ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_test
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol1='part1',pcol2='part1')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@hive_test
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol1='part1',pcol2='part1')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_test
+POSTHOOK: Output: default@hive_test@pcol1=part1/pcol2=part1
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol2='part2',pcol1='part2')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@hive_test
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test partition (pcol2='part2',pcol1='part2')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_test
+POSTHOOK: Output: default@hive_test@pcol1=part2/pcol2=part2
+PREHOOK: query: select * from hive_test where pcol1='part1' and pcol2='part1'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test
+PREHOOK: Input: default@hive_test@pcol1=part1/pcol2=part1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hive_test where pcol1='part1' and pcol2='part1'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test
+POSTHOOK: Input: default@hive_test@pcol1=part1/pcol2=part1
+#### A masked pattern was here ####
+1	part1	part1
+2	part1	part1
+3	part1	part1
+4	part1	part1
+5	part1	part1
+6	part1	part1
+PREHOOK: query: select * from hive_test where pcol1='part2' and pcol2='part2'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test
+PREHOOK: Input: default@hive_test@pcol1=part2/pcol2=part2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hive_test where pcol1='part2' and pcol2='part2'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test
+POSTHOOK: Input: default@hive_test@pcol1=part2/pcol2=part2
+#### A masked pattern was here ####
+1	part2	part2
+2	part2	part2
+3	part2	part2
+4	part2	part2
+5	part2	part2
+6	part2	part2
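
Note: the heart of the BaseSemanticAnalyzer change is to collect the user-written partition spec into a temporary HashMap, validate it, and then copy it into a LinkedHashMap keyed in the table's declared partition-key order, so that partition (pcol2=..., pcol1=...) behaves the same as partition (pcol1=..., pcol2=...). Below is a minimal JDK-only sketch of that reordering step for reference; the class and method names (PartSpecReorder, normalize) are invented for illustration and are not Hive APIs.

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Standalone sketch, not Hive code: mirrors the tmpPartSpec -> partSpec
// copy in the patch above.
public class PartSpecReorder {

  // Rebuild the user-supplied spec so its iteration order matches the
  // table's partition-key order.
  static Map<String, String> normalize(List<String> partitionKeys,
                                       Map<String, String> userSpec) {
    Map<String, String> ordered = new LinkedHashMap<String, String>(userSpec.size());
    for (String key : partitionKeys) {
      // For a dynamic partition the user supplies no value, so get()
      // yields null, just as tmpPartSpec.get(partKey) does in the patch.
      ordered.put(key, userSpec.get(key));
    }
    return ordered;
  }

  public static void main(String[] args) {
    // Table declared as: partitioned by (pcol1 string, pcol2 string).
    List<String> keys = Arrays.asList("pcol1", "pcol2");

    // User wrote the spec in the opposite order, as in loadpart2.q:
    //   partition (pcol2='part2', pcol1='part2')
    Map<String, String> userSpec = new HashMap<String, String>();
    userSpec.put("pcol2", "part2");
    userSpec.put("pcol1", "part2");

    // Prints {pcol1=part2, pcol2=part2}: schema order, regardless of
    // the order used in the query.
    System.out.println(normalize(keys, userSpec));
  }
}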