diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 049de54..8148faa 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -379,11 +380,12 @@ private Path createInputFile() throws IOException {
       List<Partition> partitions = hive.getPartitions(tbl);
       List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
       List<Path> partLocs = new ArrayList<Path>();
+      TableDesc tableDesc = Utilities.getTableDesc(tbl);
       for (Partition part : partitions) {
         partLocs.add(part.getDataLocation());
-        partDesc.add(Utilities.getPartitionDesc(part));
+        partDesc.add(Utilities.getPartitionDescFromTableDesc(tableDesc, part, true));
       }
-      work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));
+      work = new FetchWork(partLocs, partDesc, tableDesc);
       work.setLimit(100);
     } else {
       work = new FetchWork(tbl.getDataLocation(), Utilities.getTableDesc(tbl));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index d8e463d..6c68df2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -1230,9 +1230,9 @@ public static PartitionDesc getPartitionDesc(Partition part) throws HiveExceptio
     return (new PartitionDesc(part));
   }
 
-  public static PartitionDesc getPartitionDescFromTableDesc(TableDesc tblDesc, Partition part)
-      throws HiveException {
-    return new PartitionDesc(part, tblDesc);
+  public static PartitionDesc getPartitionDescFromTableDesc(TableDesc tblDesc, Partition part,
+      boolean usePartSchemaProperties) throws HiveException {
+    return new PartitionDesc(part, tblDesc, usePartSchemaProperties);
   }
 
   private static String getOpTreeSkel_helper(Operator op, String indent) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 29854d8..693d8c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -487,8 +487,6 @@ public static void setMapWork(MapWork plan, ParseContext parseCtx, Set
     ArrayList<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
 
     Path tblDir = null;
-    TableDesc tblDesc = null;
-
     plan.setNameToSplitSample(parseCtx.getNameToSplitSample());
 
     if (partsList == null) {
@@ -575,6 +573,8 @@
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
   }
 
   public PartitionDesc(final Partition part) throws HiveException {
-    this.tableDesc = Utilities.getTableDesc(part.getTable());
+    PartitionDescConstructorHelper(part, Utilities.getTableDesc(part.getTable()), true);
     setProperties(part.getMetadataFromPartitionSchema());
-    partSpec = part.getSpec();
-    setInputFileFormatClass(part.getInputFormatClass());
-    setOutputFileFormatClass(part.getOutputFormatClass());
   }
 
-  public PartitionDesc(final Partition part,final TableDesc tblDesc) throws HiveException {
+  public PartitionDesc(final Partition part,final TableDesc tblDesc, boolean usePartSchemaProperties)
+    throws HiveException {
+    PartitionDescConstructorHelper(part,tblDesc, usePartSchemaProperties);
+    if (usePartSchemaProperties) {
+      setProperties(part.getMetadataFromPartitionSchema());
+    } else {
+      // each partition maintains a large properties
+      setProperties(part.getSchemaFromTableSchema(tblDesc.getProperties()));
+    }
+  }
+
+  private void PartitionDescConstructorHelper(final Partition part,final TableDesc tblDesc, boolean setInputFileFormat)
+    throws HiveException {
     this.tableDesc = tblDesc;
-    setProperties(part.getSchemaFromTableSchema(tblDesc.getProperties())); // each partition maintains a large properties
-    partSpec = part.getSpec();
-    setOutputFileFormatClass(part.getInputFormatClass());
+    this.partSpec = part.getSpec();
+    if (setInputFileFormat) {
+      setInputFileFormatClass(part.getInputFormatClass());
+    } else {
+      setOutputFileFormatClass(part.getInputFormatClass());
+    }
     setOutputFileFormatClass(part.getOutputFormatClass());
   }
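Note on the PartitionDesc hunk above: both public constructors are funneled through a private helper, and the new usePartSchemaProperties flag picks where the descriptor's properties come from, either the partition's own schema or a schema derived from the table-level TableDesc (the branch the code comments as "each partition maintains a large properties"). The sketch below only mirrors that constructor-plus-helper pattern; TableInfo, PartInfo, PartDescSketch and PartitionDescPatternDemo are invented names for this illustration, not Hive classes, and the property handling is deliberately simplified.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;

// Simplified stand-ins for Hive's TableDesc/Partition/PartitionDesc. Every
// name in this file is invented for the sketch; none of it is Hive's real API.
class TableInfo {
  final Properties tableProperties = new Properties();
}

class PartInfo {
  final Map<String, String> spec = new LinkedHashMap<String, String>();
  final Properties partitionSchema = new Properties();
}

class PartDescSketch {
  private Properties properties;
  private Map<String, String> partSpec;

  // Counterpart of the patched constructor: the flag picks which schema
  // source populates this descriptor's properties.
  PartDescSketch(PartInfo part, TableInfo table, boolean usePartSchemaProperties) {
    constructorHelper(part);
    if (usePartSchemaProperties) {
      // use the partition's own schema
      setProperties(part.partitionSchema);
    } else {
      // derive the properties from the table schema; in Hive this is the
      // branch flagged as "each partition maintains a large properties"
      Properties fromTable = new Properties();
      fromTable.putAll(table.tableProperties);
      setProperties(fromTable);
    }
  }

  // Counterpart of PartitionDescConstructorHelper: state every constructor
  // needs regardless of where the properties come from.
  private void constructorHelper(PartInfo part) {
    this.partSpec = part.spec;
  }

  private void setProperties(Properties props) {
    this.properties = props;
  }

  Properties getProperties() {
    return properties;
  }

  Map<String, String> getPartSpec() {
    return partSpec;
  }
}

public class PartitionDescPatternDemo {
  public static void main(String[] args) {
    TableInfo table = new TableInfo();
    table.tableProperties.setProperty("columns", "id,name,ds");

    PartInfo part = new PartInfo();
    part.spec.put("ds", "2014-01-01");
    part.partitionSchema.setProperty("columns", "id,name");

    PartDescSketch fromPartition = new PartDescSketch(part, table, true);
    PartDescSketch fromTable = new PartDescSketch(part, table, false);

    System.out.println("partition-schema columns: "
        + fromPartition.getProperties().getProperty("columns"));
    System.out.println("table-schema columns:     "
        + fromTable.getProperties().getProperty("columns"));
  }
}

The same flag is what the new Utilities.getPartitionDescFromTableDesc overload forwards to the constructor; the TestHCatMultiOutputFormat hunk, for example, computes a single TableDesc outside the loop and passes true for each partition instead of re-deriving the table descriptor per partition.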