diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index ca86301..cb678df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import java.beans.DefaultPersistenceDelegate;
 import java.beans.Encoder;
 import java.beans.ExceptionListener;
@@ -82,7 +80,6 @@
 import java.util.zip.InflaterInputStream;
 
 import org.antlr.runtime.CommonToken;
-import org.apache.calcite.util.ChunkList;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.WordUtils;
@@ -2844,11 +2841,7 @@ private static void getMRTasks(List<Task<? extends Serializable>> tasks, List<ExecDriver> mrTasks)
       Path partPath = status[i].getPath();
       assert fs.getFileStatus(partPath).isDir() : "partitions " + partPath
           + " is not a directory !";
-
-      // generate a full partition specification
-      LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<String, String>(partSpec);
-      Warehouse.makeSpecFromName(fullPartSpec, partPath);
-      fullPartSpecs.add(fullPartSpec);
+      fullPartSpecs.add(extractPartSpecFromPath(partSpec, partPath));
     }
     return fullPartSpecs;
   } catch (IOException e) {
@@ -2856,6 +2849,33 @@ private static void getMRTasks(List<Task<? extends Serializable>> tasks, List<ExecDriver> mrTasks)
     }
   }
 
+  public static LinkedHashMap<String, String> extractPartSpecFromPath(
+      Map<String, String> partSpec, Path partPath) {
+    // generate a full partition specification
+    LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<String, String>();
+    Warehouse.makeSpecFromName(fullPartSpec, partPath);
+
+    // retain partSpecs only.. we believe the path is created as part-spec
+    List<String> nonPartSpecs = new ArrayList<String>();
+    for (Map.Entry<String, String> entry : fullPartSpec.entrySet()) {
+      String key = entry.getKey();
+      String value = entry.getValue();
+      if (!partSpec.containsKey(key)) {
+        nonPartSpecs.add(key);
+        continue;
+      }
+      String expected = partSpec.get(key);
+      if (StringUtils.isNotEmpty(expected) && !expected.equals(value)) {
+        // should not happen
+        LOG.warn("Path " + entry + " is not consistent with expected " + key + "=" + expected);
+      }
+    }
+    for (String nonPartSpec : nonPartSpecs) {
+      fullPartSpec.remove(nonPartSpec);
+    }
+    return fullPartSpec;
+  }
+
   public static StatsPublisher getStatsPublisher(JobConf jc) {
     StatsFactory factory = StatsFactory.newFactory(jc);
     return factory == null ? null : factory.getStatsPublisher();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 396c070..bc1fd94 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1590,8 +1590,8 @@ private void constructOneLBLocationMap(FileStatus fSta,
           "partitions " + partPath + " is not a directory !";
 
         // generate a full partition specification
-        LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<String, String>(partSpec);
-        Warehouse.makeSpecFromName(fullPartSpec, partPath);
+        LinkedHashMap<String, String> fullPartSpec =
+            Utilities.extractPartSpecFromPath(partSpec, partPath);
         Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, replace,
             holdDDLTime, true, listBucketingEnabled, false, isAcid);
         partitionsMap.put(fullPartSpec, newPartition);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
index 3f07ea7..857c45b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -30,8 +29,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 
 /**
  * Conditional task resolution interface. This is invoked at run time to get the
@@ -317,11 +316,8 @@ private void generateActualTasks(HiveConf conf, List<Task<? extends Serializable>> resTsks,
 
   private PartitionDesc generateDPFullPartSpec(DynamicPartitionCtx dpCtx, FileStatus[] status,
       TableDesc tblDesc, int i) {
-    Map<String, String> fullPartSpec = new LinkedHashMap<String, String>(
-        dpCtx.getPartSpec());
-    Warehouse.makeSpecFromName(fullPartSpec, status[i].getPath());
-    PartitionDesc pDesc = new PartitionDesc(tblDesc, (LinkedHashMap) fullPartSpec);
-    return pDesc;
+    return new PartitionDesc(tblDesc,
+        Utilities.extractPartSpecFromPath(dpCtx.getPartSpec(), status[i].getPath()));
   }
 
   private void setupMapRedWork(HiveConf conf, MapWork mWork, long targetSize, long totalSize) {
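
Note for reviewers (not part of the patch): a minimal sketch of how the new helper behaves, assuming a standard warehouse-style directory layout; the table path, column names, and values below are hypothetical. extractPartSpecFromPath() parses the partition values out of the directory name via Warehouse.makeSpecFromName(), drops any key=value path components whose keys are not in the requested (partial) partition spec, and logs a warning if a statically specified value disagrees with the path:

    // partial spec: "ds" is static, "hr" is a dynamic partition column
    Map<String, String> partSpec = new LinkedHashMap<String, String>();
    partSpec.put("ds", "2015-01-01");
    partSpec.put("hr", ""); // empty value: to be resolved from the path

    Path partPath = new Path("/warehouse/tbl/ds=2015-01-01/hr=12");
    LinkedHashMap<String, String> fullPartSpec =
        Utilities.extractPartSpecFromPath(partSpec, partPath);
    // fullPartSpec is now {ds=2015-01-01, hr=12}; a spurious trailing component
    // such as .../extra=1 would be dropped because "extra" is not in partSpec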