diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index c2d5c8c..681e7ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -5665,28 +5665,6 @@ private Operator genGroupByPlanMapAggr2MR(String dest, QB qb,
     }
   }
 
-  @SuppressWarnings("nls")
-  private Operator genConversionOps(String dest, QB qb, Operator input)
-      throws SemanticException {
-
-    Integer dest_type = qb.getMetaData().getDestTypeForAlias(dest);
-    switch (dest_type.intValue()) {
-    case QBMetaData.DEST_TABLE: {
-      qb.getMetaData().getDestTableForAlias(dest);
-      break;
-    }
-    case QBMetaData.DEST_PARTITION: {
-      qb.getMetaData().getDestPartitionForAlias(dest).getTable();
-      break;
-    }
-    default: {
-      return input;
-    }
-    }
-
-    return input;
-  }
-
   private int getReducersBucketing(int totalFiles, int maxReducers) {
     int numFiles = (int)Math.ceil((double)totalFiles / (double)maxReducers);
     while (true) {
@@ -8883,7 +8861,6 @@ private Operator genPostGroupByBodyPlan(Operator curr, String dest, QB qb,
           .getOrderByForClause(dest) != null ? false : true);
       }
     } else {
-      curr = genConversionOps(dest, qb, curr);
      // exact limit can be taken care of by the fetch operator
      if (limit != null) {
        boolean extraMRStep = true;
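
For context on why this removal appears safe: on every path the deleted genConversionOps only read destination metadata (table or partition lookups whose results were discarded) and handed back the operator it was given, so its single call site reduced to curr = curr. The standalone sketch below illustrates that pattern; it is not Hive code, and the stand-in names (NoOpConversionSketch, DestKind, the String "operator") are hypothetical, used only to mirror the shape of the removed method.

// Standalone sketch, not Hive code: every branch either falls through to
// "return input" or returns input directly, so a caller's
// "curr = genConversionOps(dest, qb, curr)" never changes curr and the
// helper plus its call site can be dropped without altering the plan.
public class NoOpConversionSketch {

  // Hypothetical stand-in for QBMetaData's destination kinds.
  enum DestKind { TABLE, PARTITION, OTHER }

  // Same control flow as the removed method: a lookup per destination kind,
  // result ignored, input returned unchanged on every path.
  static String genConversionOps(DestKind dest, String input) {
    switch (dest) {
    case TABLE:
      // metadata lookup only; result ignored
      break;
    case PARTITION:
      // metadata lookup only; result ignored
      break;
    default:
      return input;
    }
    return input;
  }

  public static void main(String[] args) {
    String curr = "TS[0]";
    for (DestKind kind : DestKind.values()) {
      // Holds for every destination kind, which is why the call site can go.
      System.out.println(kind + " -> unchanged: "
          + curr.equals(genConversionOps(kind, curr)));
    }
  }
}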