diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 53208cce0d..ca283c144c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2694,10 +2694,40 @@ private void constructOneLBLocationMap(FileStatus fSta,
     Iterator<Partition> iterator = partitionIterable.iterator();
 
     // Match valid partition path to partitions
+    // Build a lookup from partition column name to its schema so that string
+    // partition values can be normalized to the column's declared type.
+    List<FieldSchema> partCols = tbl.getPartCols();
+    Map<String, FieldSchema> partFieldMap = new HashMap<>();
+    for (FieldSchema field : partCols) {
+      partFieldMap.put(field.getName(), field);
+    }
     while (iterator.hasNext()) {
       Partition partition = iterator.next();
       partitionDetailsMap.entrySet().stream()
-          .filter(entry -> entry.getValue().fullSpec.equals(partition.getSpec()))
+          .filter(entry -> {
+            Map<String, String> fullSpec = entry.getValue().fullSpec;
+            if (fullSpec.equals(partition.getSpec())) {
+              return true;
+            }
+            // Fall back to a type-aware comparison so that specs differing only
+            // in string form (e.g. "01" vs "1" for an int column) still match.
+            for (Map.Entry<String, String> specEntry : fullSpec.entrySet()) {
+              String val = specEntry.getValue();
+              if (val == null || val.isEmpty()) {
+                return false;
+              }
+              FieldSchema field = partFieldMap.get(specEntry.getKey());
+              if (field == null) {
+                return false;
+              }
+              Object partVal = Partition.partSpecToPartObject(field, val);
+              if (partVal == null
+                  || !partVal.toString().equals(partition.getSpec().get(specEntry.getKey()))) {
+                return false;
+              }
+            }
+            return true;
+          })
           .findAny().ifPresent(entry -> {
             entry.getValue().partition = partition;
             entry.getValue().hasOldPartition = true;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index 136709c6dc..32947afa1b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -30,6 +30,12 @@
 import org.apache.hadoop.hive.common.StringInternUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -128,7 +134,19 @@ public Partition(Table tbl, Map partSpec, Path location) throws
         throw new HiveException("partition spec is invalid; field "
             + field.getName() + " does not exist or is empty");
       }
-      pvals.add(val);
+      // Normalize the value to the partition column's declared type so that
+      // equivalent spellings (e.g. "01" vs "1" for an int column) are stored
+      // canonically.
+      Object partVal = partSpecToPartObject(field, val);
+      if (partVal == null) {
+        throw new HiveException("partition spec is invalid; value '" + val
+            + "' cannot be converted to type " + field.getType()
+            + " of field " + field.getName());
+      }
+      if (!partVal.toString().equals(val)) {
+        LOG.warn("The partition value is forced to the corresponding type {}", field.getType());
+      }
+      pvals.add(partVal.toString());
     }
 
     org.apache.hadoop.hive.metastore.api.Partition tpart =
@@ -144,6 +162,27 @@ public Partition(Table tbl, Map partSpec, Path location) throws
     return tpart;
   }
 
+  /**
+   * Converts a partition spec value from its string form to an instance of the
+   * partition column's declared primitive type (e.g. "1" becomes an Integer
+   * for an int column).
+   *
+   * @param field the partition column schema supplying the target type
+   * @param partSpecVal the partition value in string form
+   * @return the converted value, or {@code null} if the string cannot be
+   *         converted to the column's type
+   */
+  public static Object partSpecToPartObject(FieldSchema field, String partSpecVal) {
+    PrimitiveTypeInfo typeInfo = TypeInfoFactory.getPrimitiveTypeInfo(field.getType());
+    ObjectInspector outputOi =
+        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo);
+    ObjectInspector inputOi = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+        PrimitiveObjectInspector.PrimitiveCategory.STRING);
+    ObjectInspectorConverters.Converter converter =
+        ObjectInspectorConverters.getConverter(inputOi, outputOi);
+    return converter.convert(partSpecVal);
+  }
+
   /**
    * Initializes this object with the given variables
    *