# NOTE(review): recovered from a mangled copy in which all text between '<'
# and '>' had been stripped (destroying Java generics and whole spans) and all
# newlines were collapsed. Generic type parameters below are reconstructed
# from the surviving call sites; hunk line counts were recomputed to match the
# recovered content. Two parts could NOT be recovered and must be confirmed
# against the original commit:
#   1. The second GenMapRedUtils hunk (originally "@@ -682,6 +691,12 @@") was
#      swallowed by the stripping. It presumably wrote readEntityHashMap back
#      into `inputs`; without it the conversion below is incomplete.
#   2. The PlanUtils.java hunk offsets are approximate (apply with fuzz or
#      --recount).
# NOTE(review): Entity.hashCode() now uses toString().toLowerCase() with the
# default locale; consider toLowerCase(Locale.ROOT) to avoid locale-sensitive
# case mapping (e.g. Turkish dotless i) — TODO confirm against project policy.
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 50c76db..df2c969 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -361,7 +361,7 @@ public boolean equals(Object o) {
    */
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return toString().toLowerCase().hashCode();
   }
 
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 7ed50b4..f858f9d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -127,23 +127,6 @@ public ReadEntity(Path d, boolean islocal) {
     return parents;
   }
 
-  /**
-   * Equals function.
-   */
-  @Override
-  public boolean equals(Object o) {
-    if (o == null) {
-      return false;
-    }
-
-    if (o instanceof ReadEntity) {
-      ReadEntity ore = (ReadEntity) o;
-      return (toString().equalsIgnoreCase(ore.toString()));
-    } else {
-      return false;
-    }
-  }
-
   public boolean isDirect() {
     return isDirect;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 7129ed8..ebe1205 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -572,10 +572,16 @@ public static void setMapWork(MapWork plan, ParseContext parseCtx, Set<ReadEntity> inputs,
+    HashMap<ReadEntity, ReadEntity> readEntityHashMap = new HashMap<ReadEntity, ReadEntity>(inputs.size());
+    for (ReadEntity input : inputs) {
+      readEntityHashMap.put(input, input);
+    }
+
     for (Partition part : parts) {
       if (part.getTable().isPartitioned()) {
-        PlanUtils.addInput(inputs, new ReadEntity(part, parentViewInfo, isDirectRead));
+        PlanUtils.addInput(readEntityHashMap, new ReadEntity(part, parentViewInfo, isDirectRead));
       } else {
-        PlanUtils.addInput(inputs, new ReadEntity(part.getTable(), parentViewInfo, isDirectRead));
+        PlanUtils.addInput(readEntityHashMap, new ReadEntity(part.getTable(), parentViewInfo,
+            isDirectRead));
       }
-
+
       // Later the properties have to come from the partition as opposed
       // to from the table in order to support versioning.
       Path[] paths = null;
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -880,6 +880,20 @@ public static ReadEntity addInput(Set<ReadEntity> inputs, ReadEntity newInput) {
     // make compile happy
     return null;
   }
 
+  public static ReadEntity addInput(HashMap<ReadEntity, ReadEntity> inputs, ReadEntity newInput) {
+    if (inputs.containsKey(newInput)) {
+      ReadEntity input = inputs.get(newInput);
+      if ((newInput.getParents() != null) && (!newInput.getParents().isEmpty())) {
+        input.getParents().addAll(newInput.getParents());
+        input.setDirect(input.isDirect() || newInput.isDirect());
+      }
+      return input;
+    } else {
+      inputs.put(newInput, newInput);
+      return newInput;
+    }
+  }
   public static String getExprListString(Collection<ExprNodeDesc> exprs) {
     StringBuffer sb = new StringBuffer();