diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinTypeCheckCtx.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinTypeCheckCtx.java
index bbd4723..dccd1d9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinTypeCheckCtx.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinTypeCheckCtx.java
@@ -53,7 +53,7 @@
    */
   public JoinTypeCheckCtx(RowResolver leftRR, RowResolver rightRR, JoinType hiveJoinType)
       throws SemanticException {
-    super(RowResolver.getCombinedRR(leftRR, rightRR), false, false, false, false, false, false,
+    super(RowResolver.getCombinedRR(leftRR, rightRR), true, false, false, false, false, false, false,
         false, false);
     this.inputRRLst = ImmutableList.of(leftRR, rightRR);
     this.outerJoin = (hiveJoinType == JoinType.LEFTOUTER) || (hiveJoinType == JoinType.RIGHTOUTER)
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index 18f0180..bf1b5d4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -18,9 +18,16 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.*;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
+import java.util.Set;
+import java.util.Stack;
 
-import org.apache.hadoop.hive.common.JavaUtils;
+import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -266,4 +273,44 @@ public static boolean containsTokenOfType(ASTNode root, PTFUtils.Predicate
     }
     return false;
   }
+
+  public static boolean sameTree(ASTNode node, ASTNode otherNode) {
+    if (node == null && otherNode == null) {
+      return true;
+    }
+    if ((node == null && otherNode != null) ||
+        (node != null && otherNode == null)) {
+      return false;
+    }
+
+    Stack<Tree> stack = new Stack<Tree>();
+    stack.push(node);
+    Stack<Tree> otherStack = new Stack<Tree>();
+    otherStack.push(otherNode);
+
+    while (!stack.empty() && !otherStack.empty()) {
+      Tree p = stack.pop();
+      Tree otherP = otherStack.pop();
+
+      if (p.isNil() != otherP.isNil()) {
+        return false;
+      }
+      if (!p.isNil()) {
+        if (!p.toString().equals(otherP.toString())) {
+          return false;
+        }
+      }
+      if (p.getChildCount() != otherP.getChildCount()) {
+        return false;
+      }
+      for (int i = p.getChildCount()-1; i >= 0; i--) {
+        Tree t = p.getChild(i);
+        stack.push(t);
+        Tree otherT = otherP.getChild(i);
+        otherStack.push(otherT);
+      }
+    }
+
+    return stack.empty() && otherStack.empty();
+  }
 }
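Note on the ParseUtils addition: sameTree walks both ASTs iteratively with a pair of explicit stacks instead of recursing, so deeply nested predicate trees cannot overflow the call stack. Corresponding nodes must agree on nil-ness, token text, and child count. A minimal usage sketch follows, assuming the single-argument ParseDriver.parse(String) entry point; the query text is illustrative and not part of this patch:

    // Parse the same statement twice; structurally identical ASTs compare equal.
    ParseDriver pd = new ParseDriver();
    ASTNode first = pd.parse("SELECT key FROM src WHERE value > 10");
    ASTNode second = pd.parse("SELECT key FROM src WHERE value > 10");
    boolean equal = ParseUtils.sameTree(first, second); // true
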
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index aab4250..0c191da 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2625,7 +2625,7 @@ private Operator genHavingPlan(String dest, QB qb, Operator input,
      * so we invoke genFilterPlan to handle SubQuery algebraic transformation,
      * just as is done for SubQuery predicates appearing in the Where Clause.
      */
-    Operator output = genFilterPlan(condn, qb, input, aliasToOpInfo, true);
+    Operator output = genFilterPlan(condn, qb, input, aliasToOpInfo, true, false);
     output = putOpInsertMap(output, inputRR);
     return output;
   }
@@ -2644,7 +2644,7 @@ private Operator genPlanForSubQueryPredicate(
   @SuppressWarnings("nls")
   private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
       Map<String, Operator> aliasToOpInfo,
-      boolean forHavingClause)
+      boolean forHavingClause, boolean forGroupByClause)
       throws SemanticException {
 
     OpParseContext inputCtx = opParseCtx.get(input);
@@ -2786,7 +2786,7 @@ private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
       }
     }
 
-    return genFilterPlan(qb, searchCond, input);
+    return genFilterPlan(qb, searchCond, input, forHavingClause || forGroupByClause);
   }
 
   /**
@@ -2800,13 +2800,13 @@ private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
    *          the input operator
    */
   @SuppressWarnings("nls")
-  private Operator genFilterPlan(QB qb, ASTNode condn, Operator input)
+  private Operator genFilterPlan(QB qb, ASTNode condn, Operator input, boolean useCaching)
       throws SemanticException {
 
     OpParseContext inputCtx = opParseCtx.get(input);
     RowResolver inputRR = inputCtx.getRowResolver();
     Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
-        new FilterDesc(genExprNodeDesc(condn, inputRR), false), new RowSchema(
+        new FilterDesc(genExprNodeDesc(condn, inputRR, useCaching), false), new RowSchema(
         inputRR.getColumnInfos()), input), inputRR);
 
     if (LOG.isDebugEnabled()) {
@@ -5414,7 +5414,7 @@ private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operat
 
     if (parseInfo.getWhrForClause(dest) != null) {
       ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
-      curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, forwardOp, aliasToOpInfo, false);
+      curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, forwardOp, aliasToOpInfo, false, true);
     }
 
     // Generate GroupbyOperator
@@ -7559,7 +7559,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree,
     if ( joinSrcOp != null ) {
       ArrayList<ASTNode> filter = joinTree.getFiltersForPushing().get(0);
       for (ASTNode cond : filter) {
-        joinSrcOp = genFilterPlan(qb, cond, joinSrcOp);
+        joinSrcOp = genFilterPlan(qb, cond, joinSrcOp, false);
       }
     }
 
@@ -7615,7 +7615,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree,
 
     Operator op = joinOp;
     for(ASTNode condn : joinTree.getPostJoinFilters() ) {
-      op = genFilterPlan(qb, condn, op);
+      op = genFilterPlan(qb, condn, op, false);
     }
     return op;
   }
@@ -7788,7 +7788,7 @@ private void pushJoinFilters(QB qb, QBJoinTree joinTree,
         Operator srcOp = map.get(src);
         ArrayList<ASTNode> filter = filters.get(pos);
         for (ASTNode cond : filter) {
-          srcOp = genFilterPlan(qb, cond, srcOp);
+          srcOp = genFilterPlan(qb, cond, srcOp, false);
        }
         map.put(src, srcOp);
       }
@@ -8831,7 +8831,7 @@ private Operator genBodyPlan(QB qb, Operator input, Map<String, Operator> aliasT
 
       if (qbp.getWhrForClause(dest) != null) {
         ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
-        curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, curr, aliasToOpInfo, false);
+        curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, curr, aliasToOpInfo, false, false);
       }
       // Preserve operator before the GBY - we'll use it to resolve '*'
       Operator gbySource = curr;
@@ -10425,7 +10425,12 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input)
       throws SemanticException {
     // Since the user didn't supply a customized type-checking context,
     // use default settings.
-    TypeCheckCtx tcCtx = new TypeCheckCtx(input);
+    return genExprNodeDesc(expr, input, true);
+  }
+
+  public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean useCaching)
+      throws SemanticException {
+    TypeCheckCtx tcCtx = new TypeCheckCtx(input, useCaching);
     return genExprNodeDesc(expr, input, tcCtx);
   }
 
@@ -10453,7 +10458,10 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input,
     // build the exprNodeFuncDesc with recursively built children.
 
     // If the current subExpression is pre-calculated, as in Group-By etc.
-    ExprNodeDesc cached = getExprNodeDescCached(expr, input);
+    ExprNodeDesc cached = null;
+    if (tcCtx.isUseCaching()) {
+      cached = getExprNodeDescCached(expr, input);
+    }
     if (cached == null) {
       Map<ASTNode, ExprNodeDesc> allExprs = genAllExprNodeDesc(expr, input, tcCtx);
       return allExprs.get(expr);
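Why the new flags matter: getExprNodeDescCached returns an ExprNodeDesc that was translated against one particular RowResolver. For HAVING clauses and for the shared where-clause filter in the multi-GROUP-BY path, the same AST node can be translated more than once against different row schemas, so a cache hit could bind a column to the wrong input. The following toy model (not Hive code, just an illustration of the failure mode) shows why a cache keyed only by the AST must be bypassed whenever the resolver can change:

    import java.util.HashMap;
    import java.util.Map;

    // Toy model of an AST-to-expression cache keyed only by the AST text.
    // The correct translation also depends on the resolver it was built
    // against, so a hit made under one resolver is stale under another.
    public class CachingPitfall {
      private static final Map<String, String> CACHE = new HashMap<String, String>();

      static String translate(String ast, String resolver, boolean useCaching) {
        if (useCaching && CACHE.containsKey(ast)) {
          return CACHE.get(ast); // may have been built against another resolver
        }
        String desc = ast + " resolved against " + resolver;
        CACHE.put(ast, desc);
        return desc;
      }

      public static void main(String[] args) {
        System.out.println(translate("(value > 10)", "dest1", true));  // correct: dest1
        System.out.println(translate("(value > 10)", "dest2", true));  // stale: still dest1
        System.out.println(translate("(value > 10)", "dest2", false)); // correct: dest2
      }
    }
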
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
index b19e2bf..8ad28be 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
@@ -35,6 +35,8 @@
    */
   private RowResolver inputRR;
 
+  private final boolean useCaching;
+
   /**
    * Receives translations which will need to be applied during unparse.
    */
@@ -77,15 +79,20 @@
    *          The input row resolver of the previous operator.
    */
   public TypeCheckCtx(RowResolver inputRR) {
-    this(inputRR, false, true, true, true, true, true, true, true);
+    this(inputRR, true);
+  }
+
+  public TypeCheckCtx(RowResolver inputRR, boolean useCaching) {
+    this(inputRR, useCaching, false, true, true, true, true, true, true, true);
   }
 
-  public TypeCheckCtx(RowResolver inputRR, boolean allowStatefulFunctions,
+  public TypeCheckCtx(RowResolver inputRR, boolean useCaching, boolean allowStatefulFunctions,
       boolean allowDistinctFunctions, boolean allowGBExprElimination, boolean allowAllColRef,
       boolean allowFunctionStar, boolean allowWindowing,
       boolean allowIndexExpr, boolean allowSubQueryExpr) {
     setInputRR(inputRR);
     error = null;
+    this.useCaching = useCaching;
     this.allowStatefulFunctions = allowStatefulFunctions;
     this.allowDistinctFunctions = allowDistinctFunctions;
     this.allowGBExprElimination = allowGBExprElimination;
@@ -198,4 +205,8 @@ public boolean getallowIndexExpr() {
   public boolean getallowSubQueryExpr() {
     return allowSubQueryExpr;
   }
+
+  public boolean isUseCaching() {
+    return useCaching;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 0e97530..d823f03 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -124,6 +124,10 @@ public static ExprNodeDesc processGByExpr(Node nd, Object procCtx)
 
     ASTNode expr = (ASTNode) nd;
     TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
 
+    if (!ctx.isUseCaching()) {
+      return null;
+    }
+
     RowResolver input = ctx.getInputRR();
     ExprNodeDesc desc = null;
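Plumbing summary: the one-argument TypeCheckCtx constructor keeps caching on, so existing callers see no behavior change, and JoinTypeCheckCtx now passes an explicit true through the widened constructor for the same reason. Callers that must not reuse cached Group-By expressions (the HAVING and multi-GROUP-BY filter paths above) pass useCaching = false, which makes processGByExpr return null and forces a fresh translation. A small sketch of the two modes, assuming RowResolver's no-argument constructor; the variable names are illustrative:

    RowResolver rr = new RowResolver();
    TypeCheckCtx cachedCtx = new TypeCheckCtx(rr);           // same as new TypeCheckCtx(rr, true)
    TypeCheckCtx uncachedCtx = new TypeCheckCtx(rr, false);  // processGByExpr will return null
    assert cachedCtx.isUseCaching() && !uncachedCtx.isUseCaching();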