diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 76b1f01844b1eac4da2b19db1ad2313d7ee64a03..803f476a1b74fea625e75da503cf4a2183307f0a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -367,7 +367,7 @@ public boolean equals(Object o) {
    */
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return toString().toLowerCase().hashCode();
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 8b6a9239efffd5e864c79cf91a1a5fab5cb2cca4..021bf1f818df67d6a41d44634087a7af919cba68 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -135,23 +135,6 @@ public ReadEntity(Path d, boolean islocal) {
     return parents;
   }
 
-  /**
-   * Equals function.
-   */
-  @Override
-  public boolean equals(Object o) {
-    if (o == null) {
-      return false;
-    }
-
-    if (o instanceof ReadEntity) {
-      ReadEntity ore = (ReadEntity) o;
-      return (toString().equalsIgnoreCase(ore.toString()));
-    } else {
-      return false;
-    }
-  }
-
   public boolean isDirect() {
     return isDirect;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 2f36f044cbd8c0cbc3d955a4ddb9f0586c88e860..5c7c71af339ee1759546eefa2791d1c5bb8567c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -581,13 +581,9 @@ public static void setMapWork(MapWork plan, ParseContext parseCtx, Set<ReadEntity> inputs,
@@ ... @@ ... mvTasks, HiveConf hconf,
       // There are separate configuration parameters to control whether to
       // merge for a map-only job
       // or for a map-reduce job
-      if (currTask.getWork() instanceof MapredWork) {
+      if (currTask.getWork() instanceof MapredWork) {
         ReduceWork reduceWork = ((MapredWork) currTask.getWork()).getReduceWork();
         boolean mergeMapOnly =
             hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) && reduceWork == null;
@@ -1788,7 +1785,7 @@ public static Path createMoveTask(Task<? extends Serializable> currTask, boolean chDir,
     return Collections.emptyList();
   }
 
-  public static List<Path> getInputPathsForPartialScan(QBParseInfo parseInfo, StringBuffer aggregationKey)
+  public static List<Path> getInputPathsForPartialScan(QBParseInfo parseInfo, StringBuffer aggregationKey)
       throws SemanticException {
     List<Path> inputPaths = new ArrayList<Path>();
     switch (parseInfo.getTableSpec().specType) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index fdc1f6226670ee5c00e4beec54d516e57317f7a0..a72c085db9041a85d5ba132656f886a1df5d259d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -22,6 +22,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -52,6 +53,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -931,6 +933,42 @@ public static ReadEntity addInput(Set<ReadEntity> inputs, ReadEntity newInput) {
     // make compile happy
     return null;
   }
+
+  public static void addPartitionInputs(Collection<Partition> parts, Collection<ReadEntity> inputs,
+      ReadEntity parentViewInfo, boolean isDirectRead) {
+
+    // Store the inputs in a HashMap, since we cannot retrieve a ReadEntity from 'inputs'
+    // directly (it is implemented as a Set). ReadEntity is used as the key so that the
+    // HashMap has the same equals and hashCode behavior as the set.
+    Map<ReadEntity, ReadEntity> readEntityMap = new HashMap<ReadEntity, ReadEntity>(inputs.size());
+    for (ReadEntity input : inputs) {
+      readEntityMap.put(input, input);
+    }
+
+    for (Partition part : parts) {
+      ReadEntity newInput = null;
+      if (part.getTable().isPartitioned()) {
+        newInput = new ReadEntity(part, parentViewInfo, isDirectRead);
+      } else {
+        newInput = new ReadEntity(part.getTable(), parentViewInfo, isDirectRead);
+      }
+
+      if (readEntityMap.containsKey(newInput)) {
+        ReadEntity input = readEntityMap.get(newInput);
+        if ((newInput.getParents() != null) && (!newInput.getParents().isEmpty())) {
+          input.getParents().addAll(newInput.getParents());
+          input.setDirect(input.isDirect() || newInput.isDirect());
+        }
+      } else {
+        readEntityMap.put(newInput, newInput);
+      }
+    }
+
+    // Add any new ReadEntity objects that were added to readEntityMap above
+    if (inputs.size() != readEntityMap.size()) {
+      inputs.addAll(readEntityMap.keySet());
+    }
+  }
 
   public static String getExprListString(Collection<? extends ExprNodeDesc> exprs) {
     StringBuffer sb = new StringBuffer();
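
Note on the equals/hashCode contract this patch restores: ReadEntity's duplicate equalsIgnoreCase-based equals override is removed in favor of the base class's, and Entity.hashCode() now hashes the lower-cased string, so two entities that compare equal ignoring case can no longer land in different hash buckets. Below is a minimal, self-contained sketch of that failure mode and the fix; FakeEntity is a hypothetical stand-in for illustration, not Hive's Entity class.

import java.util.HashSet;
import java.util.Set;

public class CaseInsensitiveHashDemo {
  // Hypothetical stand-in for org.apache.hadoop.hive.ql.hooks.Entity.
  static final class FakeEntity {
    private final String name;
    FakeEntity(String name) { this.name = name; }
    @Override public String toString() { return name; }
    // Case-insensitive equality, mirroring the equalsIgnoreCase comparison
    // used by the Entity hierarchy.
    @Override public boolean equals(Object o) {
      return o instanceof FakeEntity && toString().equalsIgnoreCase(o.toString());
    }
    // The patched behavior: hash the lower-cased form so objects that are
    // equal ignoring case also share a hash code, as Object's contract requires.
    @Override public int hashCode() { return toString().toLowerCase().hashCode(); }
  }

  public static void main(String[] args) {
    Set<FakeEntity> inputs = new HashSet<FakeEntity>();
    inputs.add(new FakeEntity("default@SRC"));
    inputs.add(new FakeEntity("default@src"));
    // Prints 1: both entries hash to the same bucket, so equals() catches the
    // duplicate. With the old hashCode() (no toLowerCase()) the two could hash
    // apart and the set would report 2.
    System.out.println(inputs.size());
  }
}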
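That same contract is what makes the HashMap-keyed-by-element pattern in the new addPartitionInputs work: java.util.Set has no get(), so membership can be tested but the stored element cannot be retrieved to merge state into it. A generic sketch of the pattern follows; the names (SetMergeSketch, Merger, mergeAll) are illustrative, not Hive's.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Generic form of the pattern: index the set's elements in a map so an
// equal incoming element can be located and merged instead of dropped.
public final class SetMergeSketch {

  public interface Merger<T> {
    void merge(T existing, T candidate);
  }

  public static <T> void mergeAll(Set<T> target, Iterable<T> incoming, Merger<T> merger) {
    Map<T, T> index = new HashMap<T, T>();
    for (T existing : target) {
      index.put(existing, existing);
    }
    for (T candidate : incoming) {
      T existing = index.get(candidate);
      if (existing != null) {
        merger.merge(existing, candidate); // fold state into the stored element
      } else {
        index.put(candidate, candidate);
      }
    }
    // Anything newly indexed still has to be pushed back into the set itself.
    if (target.size() != index.size()) {
      target.addAll(index.keySet());
    }
  }
}

In the patch, the merge step corresponds to folding the new ReadEntity's parents and its isDirect flag into the already-registered input. Note the design constraint this relies on: the merged state (parents, direct flag) must not participate in equals/hashCode, otherwise mutating an element that is also a map key would corrupt the index; here the key identity is the entity's case-normalized string form.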