diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java b/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java index 5e5c054..6f7962e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java @@ -30,7 +30,7 @@ public class RuleExactMatch implements Rule { private final String ruleName; - private final String pattern; + private final String[] pattern; /** * The rule specified as operator names separated by % symbols, the left side represents the @@ -45,7 +45,7 @@ - * @param regExp - * string specification of the rule + * @param pattern + * the rule specification as an array of operator names **/ - public RuleExactMatch(String ruleName, String pattern) { + public RuleExactMatch(String ruleName, String[] pattern) { this.ruleName = ruleName; this.pattern = pattern; } @@ -62,23 +62,24 @@ public RuleExactMatch(String ruleName, String pattern) { * @return cost of the function * @throws SemanticException */ + @Override public int cost(Stack stack) throws SemanticException { int numElems = (stack != null ? 
stack.size() : 0); - String name = new String(); - for (int pos = numElems - 1; pos >= 0; pos--) { - name = stack.get(pos).getName() + "%" + name; + if (numElems != pattern.length) { + return -1; } - - if (pattern.equals(name)) { - return 1; + for (int pos = numElems - 1; pos >= 0; pos--) { + if(!stack.get(pos).getName().equals(pattern[pos])) { + return -1; + } } - - return -1; + return numElems; } /** * @return the name of the Node **/ + @Override public String getName() { return ruleName; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java index 108177e..5d375f6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java @@ -35,7 +35,9 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleExactMatch; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.TypeRule; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; @@ -76,9 +78,8 @@ public static void walkOperatorTree(ParseContext pctx, NodeProcessorCtx opWalker String tsOprName = TableScanOperator.getOperatorName(); String filtOprName = FilterOperator.getOperatorName(); - opRules.put(new RuleRegExp("R1", new StringBuilder().append("(").append(tsOprName).append("%") - .append(filtOprName).append("%)|(").append(tsOprName).append("%").append(filtOprName) - .append("%").append(filtOprName).append("%)").toString()), filterProc); + opRules.put(new RuleExactMatch("R1", new String[] {tsOprName, filtOprName, filtOprName}), filterProc); + opRules.put(new RuleExactMatch("R2", new String[] {tsOprName, filtOprName}), filterProc); // The dispatcher fires the 
processor corresponding to the closest matching // rule and passes the context along @@ -111,10 +112,9 @@ public static void walkOperatorTree(ParseContext pctx, NodeProcessorCtx opWalker // the operator stack. The dispatcher // generates the plan from the operator tree Map exprRules = new LinkedHashMap(); - exprRules.put(new RuleRegExp("R1", ExprNodeColumnDesc.class.getName() + "%"), colProc); - exprRules.put(new RuleRegExp("R2", ExprNodeFieldDesc.class.getName() + "%"), fieldProc); - exprRules.put(new RuleRegExp("R5", ExprNodeGenericFuncDesc.class.getName() + "%"), - genFuncProc); + exprRules.put(new TypeRule(ExprNodeColumnDesc.class) , colProc); + exprRules.put(new TypeRule(ExprNodeFieldDesc.class), fieldProc); + exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), genFuncProc); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java index f370d4d..a6b8d54 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java @@ -104,10 +104,10 @@ private void inferBucketingSorting(List mapRedTasks) throws Semantic BucketingSortingOpProcFactory.getSelProc()); // Matches only GroupByOperators which are reducers, rather than map group by operators, // or multi group by optimization specific operators - opRules.put(new RuleExactMatch("R2", GroupByOperator.getOperatorName() + "%"), + opRules.put(new RuleExactMatch("R2", new String[]{GroupByOperator.getOperatorName()}), BucketingSortingOpProcFactory.getGroupByProc()); // Matches only JoinOperators which are reducers, rather than map joins, SMB map joins, etc. 
- opRules.put(new RuleExactMatch("R3", JoinOperator.getOperatorName() + "%"), + opRules.put(new RuleExactMatch("R3", new String[]{JoinOperator.getOperatorName()}), BucketingSortingOpProcFactory.getJoinProc()); opRules.put(new RuleRegExp("R5", FileSinkOperator.getOperatorName() + "%"), BucketingSortingOpProcFactory.getFileSinkProc()); @@ -126,8 +126,8 @@ private void inferBucketingSorting(List mapRedTasks) throws Semantic BucketingSortingOpProcFactory.getForwardProc()); // Matches only ForwardOperators which are reducers and are followed by GroupByOperators // (specific to the multi group by optimization) - opRules.put(new RuleExactMatch("R12", ForwardOperator.getOperatorName() + "%" + - GroupByOperator.getOperatorName() + "%"), + opRules.put(new RuleExactMatch("R12",new String[]{ ForwardOperator.getOperatorName(), + GroupByOperator.getOperatorName()}), BucketingSortingOpProcFactory.getMultiGroupByProc()); // The dispatcher fires the processor corresponding to the closest matching rule and passes diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java index 3a07b17..6a1bef9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java @@ -38,7 +38,9 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleExactMatch; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.TypeRule; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -267,14 +269,9 @@ public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext, // the operator stack. 
The dispatcher // generates the plan from the operator tree Map exprRules = new LinkedHashMap(); - exprRules.put( - new RuleRegExp("R1", ExprNodeColumnDesc.class.getName() + "%"), - getColumnProcessor()); - exprRules.put( - new RuleRegExp("R2", ExprNodeFieldDesc.class.getName() + "%"), - getFieldProcessor()); - exprRules.put(new RuleRegExp("R3", ExprNodeGenericFuncDesc.class.getName() - + "%"), getGenericFuncProcessor()); + exprRules.put(new TypeRule(ExprNodeColumnDesc.class), getColumnProcessor()); + exprRules.put(new TypeRule(ExprNodeFieldDesc.class), getFieldProcessor()); + exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), getGenericFuncProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along @@ -319,9 +316,9 @@ private static void extractFinalCandidates(ExprNodeDesc expr, assert ctx.getNewToOldExprMap().containsKey(expr); for (int i = 0; i < expr.getChildren().size(); i++) { ctx.getNewToOldExprMap().put( - (ExprNodeDesc) expr.getChildren().get(i), + expr.getChildren().get(i), ctx.getNewToOldExprMap().get(expr).getChildren().get(i)); - extractFinalCandidates((ExprNodeDesc) expr.getChildren().get(i), + extractFinalCandidates(expr.getChildren().get(i), ctx, conf); } return;