Index: ql/src/test/results/clientnegative/dyn_part_merge.q.out =================================================================== --- ql/src/test/results/clientnegative/dyn_part_merge.q.out (revision 0) +++ ql/src/test/results/clientnegative/dyn_part_merge.q.out (revision 0) @@ -0,0 +1,6 @@ +PREHOOK: query: create table dyn_merge(key string, value string) partitioned by (ds string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table dyn_merge(key string, value string) partitioned by (ds string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@dyn_merge +FAILED: Error in semantic analysis: Dynamic partition does not support merging using non-CombineHiveInputFormat. Please check your hive.input.format setting and make sure your Hadoop version supports CombineFileInputFormat. Index: ql/src/test/queries/clientnegative/dyn_part_merge.q =================================================================== --- ql/src/test/queries/clientnegative/dyn_part_merge.q (revision 0) +++ ql/src/test/queries/clientnegative/dyn_part_merge.q (revision 0) @@ -0,0 +1,9 @@ +set hive.exec.dynamic.partition=true; +set hive.exec.dynamic.partition.mode=nonstrict; +set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; +set hive.mergejob.maponly=false; +set hive.merge.mapfiles=true; + +create table dyn_merge(key string, value string) partitioned by (ds string); + +insert overwrite table dyn_merge partition(ds) select key, value, ds from srcpart where ds is not null; Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 1071356) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy) @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import 
org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRMapJoinCtx; +import org.apache.hadoop.hive.ql.parse.ErrorMsg; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.RowResolver; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; @@ -277,6 +278,9 @@ createMap4Merge(fsOp, ctx, finalName); LOG.info("use CombineHiveInputformat for the merge job"); } else { + if (fsOp.getConf().getDynPartCtx() != null) { + throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_MERGE.getMsg()); + } createMapReduce4Merge(fsOp, ctx, finalName); LOG.info("use HiveInputFormat for the merge job"); } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (revision 1071356) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (working copy) @@ -143,9 +143,9 @@ + "hive.exec.dynamic.partition=true or specify partition column values"), DYNAMIC_PARTITION_STRICT_MODE("Dynamic partition strict mode requires at least one " + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"), - DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging mapfiles/mapredfiles yet." - + "Please set hive.merge.mapfiles and hive.merge.mapredfiles to false or use static " - + "partitions"), + DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging using non-CombineHiveInputFormat. " + + "Please check your hive.input.format setting and make sure your Hadoop version supports " + + "CombineFileInputFormat."), NONEXISTPARTCOL("Non-Partition column appears in the partition specification: "), UNSUPPORTED_TYPE("DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use " + "STRING instead."),