diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 4364f28..3782fad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -229,12 +229,12 @@
   private HashMap opToPartPruner;
   private HashMap opToPartList;
   private HashMap> topOps;
-  private HashMap> topSelOps;
+  private final HashMap> topSelOps;
   private LinkedHashMap, OpParseContext> opParseCtx;
   private List loadTableWork;
   private List loadFileWork;
-  private Map joinContext;
-  private Map smbMapJoinContext;
+  private final Map joinContext;
+  private final Map smbMapJoinContext;
   private final HashMap topToTable;
   private final Map fsopToTable;
   private final List reduceSinkOperatorsAddedByEnforceBucketingSorting;
@@ -2671,10 +2671,7 @@ private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
       QB qbSQ_nic = new QB(subQuery.getOuterQueryId(), notInCheck.getAlias(), true);
       Operator sqnicPlanTopOp = genPlanForSubQueryPredicate(qbSQ_nic, notInCheck);
       aliasToOpInfo.put(notInCheck.getAlias(), sqnicPlanTopOp);
-      QBJoinTree joinTree_nic = genSQJoinTree(qb, notInCheck,
-          input,
-          aliasToOpInfo);
-      pushJoinFilters(qb, joinTree_nic, aliasToOpInfo, false);
+      QBJoinTree joinTree_nic = genSQJoinTree(qb, notInCheck, input, aliasToOpInfo);
       input = genJoinOperator(qbSQ_nic, joinTree_nic, aliasToOpInfo, input);
       inputRR = opParseCtx.get(input).getRowResolver();
       if ( forHavingClause ) {
@@ -2689,10 +2686,6 @@ private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
       QBJoinTree joinTree = genSQJoinTree(qb, subQuery, input, aliasToOpInfo);
-      /*
-       * push filters only for this QBJoinTree. Child QBJoinTrees have already been handled.
-       */
-      pushJoinFilters(qb, joinTree, aliasToOpInfo, false);
       input = genJoinOperator(qbSQ, joinTree, aliasToOpInfo, input);
       searchCond = subQuery.updateOuterQueryFilter(clonedSearchCond);
     }
@@ -6453,7 +6446,7 @@ private Operator genFileSinkPlan(String dest, QB qb, Operator input)
         fileSinkDesc.setWriteType(wt);
         acidFileSinks.add(fileSinkDesc);
       }
-      
+
       fileSinkDesc.setTemporary(destTableIsTemporary);

       /* Set List Bucketing context. */
@@ -7039,9 +7032,9 @@ private Operator genReduceSinkPlanForSortingBucketing(Table tab, Operator input,
   private Operator genReduceSinkPlan(String dest, QB qb, Operator input,
       int numReducers) throws SemanticException {
-    
+
     RowResolver inputRR = opParseCtx.get(input).getRowResolver();
-    
+
     // First generate the expression for the partition and sort keys
     // The cluster by clause / distribute by clause has the aliases for
     // partition function
@@ -7101,14 +7094,14 @@ private Operator genReduceSinkPlan(String dest, QB qb, Operator input,
     }
     return genReduceSinkPlan(input, partCols, sortCols, order.toString(), numReducers);
   }
-  
+
   @SuppressWarnings("nls")
   private Operator genReduceSinkPlan(Operator input,
-      ArrayList partitionCols, ArrayList sortCols,
+      ArrayList partitionCols, ArrayList sortCols,
       String sortOrder, int numReducers) throws SemanticException {
     RowResolver inputRR = opParseCtx.get(input).getRowResolver();
-    
+
     Operator dummy = Operator.createDummy();
     dummy.setParentOperators(Arrays.asList(input));
@@ -7667,42 +7660,6 @@ private Operator genJoinPlan(QB qb, Map map)
     return joinOp;
   }

-  /**
-   * Extract the filters from the join condition and push them on top of the
-   * source operators. This procedure traverses the query tree recursively,
-   */
-  private void pushJoinFilters(QB qb, QBJoinTree joinTree,
-      Map map) throws SemanticException {
-    pushJoinFilters(qb, joinTree, map, true);
-  }
-
-  /**
-   * Extract the filters from the join condition and push them on top of the
-   * source operators. This procedure traverses the query tree recursively,
-   */
-  private void pushJoinFilters(QB qb, QBJoinTree joinTree,
-      Map map,
-      boolean recursively) throws SemanticException {
-    if ( recursively ) {
-      if (joinTree.getJoinSrc() != null) {
-        pushJoinFilters(qb, joinTree.getJoinSrc(), map);
-      }
-    }
-    ArrayList> filters = joinTree.getFiltersForPushing();
-    int pos = 0;
-    for (String src : joinTree.getBaseSrc()) {
-      if (src != null) {
-        Operator srcOp = map.get(src);
-        ArrayList filter = filters.get(pos);
-        for (ASTNode cond : filter) {
-          srcOp = genFilterPlan(qb, cond, srcOp);
-        }
-        map.put(src, srcOp);
-      }
-      pos++;
-    }
-  }
-
   private List getMapSideJoinTables(QB qb) {
     List cols = new ArrayList();
@@ -9798,10 +9755,6 @@ public Operator genPlan(QB qb, boolean skipAmbiguityCheck)
         if (!disableJoinMerge)
           mergeJoinTree(qb);
       }
-
-      // if any filters are present in the join tree, push them on top of the
-      // table
-      pushJoinFilters(qb, qb.getQbJoinTree(), aliasToOpInfo);
       srcOpInfo = genJoinPlan(qb, aliasToOpInfo);
     } else {
       // Now if there are more than 1 sources then we have a join case
@@ -12150,7 +12103,7 @@ Operator genWindowingPlan(WindowingSpec wSpec, Operator input) throws SemanticEx
   private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
       RowResolver inputRR, Operator input) throws SemanticException{
-    
+
     ArrayList partCols = new ArrayList();
     ArrayList orderCols = new ArrayList();
     StringBuilder order = new StringBuilder();