diff --git accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java
index 21392d1..6bc9aa2 100644
--- accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java
+++ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloRangeGenerator.java
@@ -20,7 +20,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.accumulo.core.data.Range;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
@@ -72,7 +73,7 @@ public AccumuloRangeGenerator(AccumuloPredicateHandler predicateHandler,
   }
 
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
       throws SemanticException {
     // If it's not some operator, pass it back
     if (!(nd instanceof ExprNodeGenericFuncDesc)) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
index 63be7b7..8633922 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
@@ -21,7 +21,8 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.Deque;
+import java.util.ArrayDeque;
 
 import org.antlr.runtime.ANTLRStringStream;
 import org.antlr.runtime.CharStream;
@@ -528,7 +529,7 @@ private static void makeFilterForEquals(String keyName, String value, String par
   /**
    * The node stack used to keep track of the tree nodes during parsing.
    */
-  private final Stack<TreeNode> nodeStack = new Stack<TreeNode>();
+  private final Deque<TreeNode> nodeStack = new ArrayDeque<TreeNode>();
 
   public TreeNode getRoot() {
     return this.root;
diff --git ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
index 1a107d7..ba64475 100644
--- ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
@@ -19,13 +19,13 @@
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Deque;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
 
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
@@ -141,7 +141,7 @@ public ExprNodeDesc analyzePredicate(
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
     NodeProcessor nodeProcessor = new NodeProcessor() {
       @Override
-      public Object process(Node nd, Stack<Node> stack,
+      public Object process(Node nd, Deque<Node> stack,
           NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java
index d7d5e80..804f305 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
-
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
+import java.util.Deque;
+
 /**
  * CompositeProcessor. Holds a list of node processors to be fired by the same
  * rule.
@@ -35,7 +35,7 @@ public CompositeProcessor(NodeProcessor...nodeProcessors) {
   }
 
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
       throws SemanticException {
     for (NodeProcessor proc: procs) {
       proc.process(nd, stack, procCtx, nodeOutputs);
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
index d452f50..230e812 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
@@ -26,7 +26,8 @@
 import java.util.List;
 import java.util.Queue;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -42,7 +43,7 @@
   * opStack keeps the nodes that have been visited, but have not been
   * dispatched yet
   */
-  protected final Stack<Node> opStack;
+  protected final Deque<Node> opStack;
  /**
   * opQueue keeps the nodes in the order that the were dispatched.
   * Then it is used to go through the processed nodes and store
@@ -65,7 +66,7 @@
   */
  public DefaultGraphWalker(Dispatcher disp) {
    dispatcher = disp;
-   opStack = new Stack<Node>();
+   opStack = new ArrayDeque<Node>();
    opQueue = new LinkedList<Node>();
  }
 
@@ -85,14 +86,14 @@ public DefaultGraphWalker(Dispatcher disp) {
    * stack of nodes encountered
    * @throws SemanticException
    */
-  public void dispatch(Node nd, Stack<Node> ndStack) throws SemanticException {
+  public void dispatch(Node nd, Deque<Node> ndStack) throws SemanticException {
     dispatchAndReturn(nd, ndStack);
   }
 
   /**
    * Returns dispatch result
    */
-  public <T> T dispatchAndReturn(Node nd, Stack<Node> ndStack) throws SemanticException {
+  public <T> T dispatchAndReturn(Node nd, Deque<Node> ndStack) throws SemanticException {
     Object[] nodeOutputs = null;
     if (nd.getChildren() != null) {
       nodeOutputs = new Object[nd.getChildren().size()];
@@ -148,7 +149,7 @@ protected void walk(Node nd) throws SemanticException {
     opStack.push(nd);
 
     // While there are still nodes to dispatch...
-    while (!opStack.empty()) {
+    while (!opStack.isEmpty()) {
       Node node = opStack.peek();
 
       if (node.getChildren() == null ||
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
index 8643563..76650ee 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
@@ -19,7 +19,8 @@
 package org.apache.hadoop.hive.ql.lib;
 
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -61,7 +62,7 @@ public DefaultRuleDispatcher(NodeProcessor defaultProc,
    * @throws SemanticException
    */
   @Override
-  public Object dispatch(Node nd, Stack<Node> ndStack, Object... nodeOutputs)
+  public Object dispatch(Node nd, Deque<Node> ndStack, Object... nodeOutputs)
       throws SemanticException {
 
     // find the firing rule
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
index ccbeadf..3cb5834 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -41,7 +42,7 @@
    * @return Object The return object from the processing call.
    * @throws SemanticException
    */
-  Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
+  Object dispatch(Node nd, Deque<Node> stack, Object... nodeOutputs)
       throws SemanticException;
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/ExpressionWalker.java ql/src/java/org/apache/hadoop/hive/ql/lib/ExpressionWalker.java
index 37ad4cb..c057273 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/ExpressionWalker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/ExpressionWalker.java
@@ -66,7 +66,7 @@ protected void walk(Node nd) throws SemanticException {
     opStack.push(nd);
 
     // While there are still nodes to dispatch...
-    while (!opStack.empty()) {
+    while (!opStack.isEmpty()) {
       Node node = opStack.peek();
 
       if (node.getChildren() == null ||
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/ForwardWalker.java ql/src/java/org/apache/hadoop/hive/ql/lib/ForwardWalker.java
index a10dc52..6ced143 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/ForwardWalker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/ForwardWalker.java
@@ -64,7 +64,7 @@ protected void addAllParents(Node nd) {
    */
   @Override
   protected void walk(Node nd) throws SemanticException {
-    if (opStack.empty() || nd != opStack.peek()) {
+    if (opStack.isEmpty() || nd != opStack.peek()) {
       opStack.push(nd);
     }
     if (allParentsDispatched(nd)) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/LevelOrderWalker.java ql/src/java/org/apache/hadoop/hive/ql/lib/LevelOrderWalker.java
index cf05d5f..417ae5b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/LevelOrderWalker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/LevelOrderWalker.java
@@ -23,7 +23,8 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -135,7 +136,7 @@ public void startWalking(Collection<Node> startNodes,
    * @throws SemanticException
    */
   @SuppressWarnings("unchecked")
-  private void walk(Node nd, int level, Stack<Node> stack) throws SemanticException {
+  private void walk(Node nd, int level, Deque<Node> stack) throws SemanticException {
     List<Operator<? extends OperatorDesc>> parents =
       ((Operator<? extends OperatorDesc>)nd).getParentOperators();
 
@@ -145,9 +146,9 @@ private void walk(Node nd, int level, Stack<Node> stack) throws SemanticExceptio
     }
 
     for(Node parent : parents) {
-      stack.add(0, parent);
+      stack.addLast(parent);
       walk(parent, level+1, stack);
-      stack.remove(0);
+      stack.removeLast();
     }
   }
 }
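Note on the LevelOrderWalker hunk above: with the head-of-deque-as-top convention the rest of this patch uses, Stack.add(0, parent), which inserts at the bottom, maps to addLast(parent), and the matching Stack.remove(0) therefore maps to removeLast(). A plain pop() would remove the head, that is, the start node, rather than the ancestor that was just appended. A minimal standalone sketch of the correspondence (class and element names are illustrative only, not from the patch):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Stack;

public class StackBottomOps {
  public static void main(String[] args) {
    Stack<String> stack = new Stack<String>();
    stack.push("nd");          // top of Stack = highest index
    stack.add(0, "parent");    // insert at the bottom (index 0)

    Deque<String> deque = new ArrayDeque<String>();
    deque.push("nd");          // head of Deque = top of the stack
    deque.addLast("parent");   // bottom of the stack = tail of the deque

    // Both report "nd" as the top and "parent" as the bottom.
    System.out.println(stack.peek() + " " + stack.get(0));     // nd parent
    System.out.println(deque.peek() + " " + deque.peekLast()); // nd parent

    stack.remove(0);    // drop the bottom element...
    deque.removeLast(); // ...which is removeLast() on the deque, not pop()
  }
}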
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
index bd83c88..4dbd274 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -39,6 +40,6 @@
    * @return Object to be returned by the process call
    * @throws SemanticException
    */
-  Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
-      Object... nodeOutputs) throws SemanticException;
+  Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
+      Object... nodeOutputs) throws SemanticException;
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
index 29cd113..1f25b54 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -33,7 +34,7 @@
    * matches
    * @throws SemanticException
    */
-  int cost(Stack<Node> stack) throws SemanticException;
+  int cost(Deque<Node> stack) throws SemanticException;
 
   /**
    * @return the name of the rule - may be useful for debugging
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java
index 6f7962e..3cb964a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
+import java.util.Iterator;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -63,13 +65,13 @@ public RuleExactMatch(String ruleName, String[] pattern) {
    * @throws SemanticException
    */
   @Override
-  public int cost(Stack<Node> stack) throws SemanticException {
+  public int cost(Deque<Node> stack) throws SemanticException {
     int numElems = (stack != null ? stack.size() : 0);
     if (numElems != pattern.length) {
       return -1;
     }
     for (int pos = numElems - 1; pos >= 0; pos--) {
-      if(!stack.get(pos).getName().equals(pattern[pos])) {
+      if(!Utils.get(stack, pos).getName().equals(pattern[pos])) {
         return -1;
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
index 1e850d6..cf925a2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
@@ -21,9 +21,11 @@
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -125,7 +127,7 @@ public RuleRegExp(String ruleName, String regExp) {
    * @return cost of the function
    * @throws SemanticException
    */
-  private int costPatternWithoutWildCardChar(Stack<Node> stack) throws SemanticException {
+  private int costPatternWithoutWildCardChar(Deque<Node> stack) throws SemanticException {
     int numElems = (stack != null ? stack.size() : 0);
 
     // No elements
@@ -136,7 +138,7 @@ private int costPatternWithoutWildCardChar(Stack<Node> stack) throws SemanticExc
     int patLen = patternWithoutWildCardChar.length();
     StringBuilder name = new StringBuilder(patLen + numElems);
     for (int pos = numElems - 1; pos >= 0; pos--) {
-      String nodeName = stack.get(pos).getName() + "%";
+      String nodeName = Utils.get(stack, pos).getName() + "%";
       name.insert(0, nodeName);
       if (name.length() >= patLen) {
         if (patternWithoutWildCardChar.contentEquals(name)) {
@@ -157,7 +159,7 @@ private int costPatternWithoutWildCardChar(Stack<Node> stack) throws SemanticExc
    * @return cost of the function
    * @throws SemanticException
    */
-  private int costPatternWithORWildCardChar(Stack<Node> stack) throws SemanticException {
+  private int costPatternWithORWildCardChar(Deque<Node> stack) throws SemanticException {
     int numElems = (stack != null ? stack.size() : 0);
 
     // No elements
@@ -191,7 +193,7 @@ private int costPatternWithORWildCardChar(Stack<Node> stack) throws SemanticExce
         name.append(cachedNames.get(maxLength));
       }
       for (int pos = maxDepth - 1; pos >= 0; pos--) {
-        String nodeName = stack.get(pos).getName() + "%";
+        String nodeName = Utils.get(stack, pos).getName() + "%";
         name.insert(0, nodeName);
 
         // We cache the values
@@ -222,12 +224,12 @@ private int costPatternWithORWildCardChar(Stack<Node> stack) throws SemanticExce
    * @return cost of the function
    * @throws SemanticException
    */
-  private int costPatternWithWildCardChar(Stack<Node> stack) throws SemanticException {
+  private int costPatternWithWildCardChar(Deque<Node> stack) throws SemanticException {
     int numElems = (stack != null ? stack.size() : 0);
     StringBuilder name = new StringBuilder();
     Matcher m = patternWithWildCardChar.matcher("");
     for (int pos = numElems - 1; pos >= 0; pos--) {
-      String nodeName = stack.get(pos).getName() + "%";
+      String nodeName = Utils.get(stack, pos).getName() + "%";
       name.insert(0, nodeName);
       m.reset(name);
       if (m.matches()) {
@@ -268,7 +270,7 @@ boolean rulePatternIsValidWithORWildCardChar() {
    * @throws SemanticException
    */
   @Override
-  public int cost(Stack<Node> stack) throws SemanticException {
+  public int cost(Deque<Node> stack) throws SemanticException {
     if (rulePatternIsValidWithoutWildCardChar()) {
       return costPatternWithoutWildCardChar(stack);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
index 16b74e5..c8caae6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
@@ -25,7 +25,8 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -53,7 +54,7 @@ public void addToDispatchList(Node dispatchedObj){
     }
   }
 
-  protected Stack<Node> opStack;
+  protected Deque<Node> opStack;
   private final List<Node> toWalk = new ArrayList<Node>();
   private final HashMap<Node, Object> retMap = new HashMap<Node, Object>();
   private final Dispatcher dispatcher;
@@ -67,7 +68,7 @@ public void addToDispatchList(Node dispatchedObj){
    */
   public TaskGraphWalker(Dispatcher disp) {
     dispatcher = disp;
-    opStack = new Stack<Node>();
+    opStack = new ArrayDeque<Node>();
     walkerCtx = new TaskGraphWalkerContext(retMap);
   }
 
@@ -94,7 +95,7 @@ public TaskGraphWalker(Dispatcher disp) {
    * stack of nodes encountered
    * @throws SemanticException
    */
-  public void dispatch(Node nd, Stack<Node> ndStack,TaskGraphWalkerContext walkerCtx) throws SemanticException {
+  public void dispatch(Node nd, Deque<Node> ndStack,TaskGraphWalkerContext walkerCtx) throws SemanticException {
     Object[] nodeOutputs = null;
     if (nd.getChildren() != null) {
       nodeOutputs = new Object[nd.getChildren().size()+1];
@@ -144,7 +145,7 @@ public void walk(Node nd) throws SemanticException {
     if (getDispatchedList().contains(nd)) {
       return;
     }
-    if (opStack.empty() || nd != opStack.peek()) {
+    if (opStack.isEmpty() || nd != opStack.peek()) {
       opStack.push(nd);
     }
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/TypeRule.java ql/src/java/org/apache/hadoop/hive/ql/lib/TypeRule.java
index 090d163..f2c8c9b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/TypeRule.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/TypeRule.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -34,7 +35,7 @@ public TypeRule(Class nodeClass) {
   }
 
   @Override
-  public int cost(Stack<Node> stack) throws SemanticException {
+  public int cost(Deque<Node> stack) throws SemanticException {
     if (stack == null) {
       return -1;
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java
index 37f18f6..c8f9054 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.hive.ql.lib;
 
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
+
+import java.util.Iterator;
 
 /**
  * Contains common utility functions to manipulate nodes, walkers etc.
@@ -34,10 +37,10 @@
    *
    * @return Node The Nth ancestor in the path with respect to the current node.
    */
-  public static Node getNthAncestor(Stack<Node> st, int n) {
+  public static Node getNthAncestor(Deque<Node> st, int n) {
     assert(st.size() - 1 >= n);
 
-    Stack<Node> tmpStack = new Stack<Node>();
+    Deque<Node> tmpStack = new ArrayDeque<Node>();
     for(int i=0; i<=n; i++)
       tmpStack.push(st.pop());
 
@@ -56,12 +59,30 @@ public static Node getNthAncestor(Stack<Node> st, int n) {
    * Returns null if not found.
    */
   @SuppressWarnings("unchecked")
-  public static <T> T findNode(Stack<Node> stack, Class<T> target) {
-    for (int i = stack.size() - 2; i >= 0; i--) {
-      if (target.isInstance(stack.get(i))) {
-        return (T) stack.get(i);
+  public static <T> T findNode(Deque<Node> stack, Class<T> target) {
+    // Search from the nearest ancestor upwards: the head of the deque is the
+    // current node itself, so skip it before matching.
+    Iterator<Node> iterator = stack.iterator();
+    iterator.next();
+    while (iterator.hasNext()) {
+      Object object = iterator.next();
+      if (target.isInstance(object)) {
+        return (T) object;
       }
     }
     return null;
   }
+
+  /**
+   * Returns the n-th node counted from the bottom of the stack (n = 0 is the
+   * first node pushed), i.e. the equivalent of the old Stack.get(n).
+   */
+  public static Node get(Deque<Node> stack, int n) {
+    Iterator<Node> iterator = stack.descendingIterator();
+    Node node = null;
+    for (int i = 0; i <= n; i++) {
+      node = iterator.next();
+    }
+    return node;
+  }
 }
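Note on the Utils.java hunk above: this helper is the crux of the migration. java.util.Stack indexes from the bottom (get(0) is the oldest element), while ArrayDeque's push/pop/peek operate on the head and its iterator() and toArray() start at the head, i.e. at the top of the stack, the reverse of Stack's index order. Utils.get therefore walks descendingIterator() so that old positional reads keep their meaning. A small self-contained demo (class name and the TS/FIL/SEL element names are illustrative only):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.Stack;

public class DequeIndexingDemo {
  // Same contract as the patch's Utils.get: n is counted from the bottom.
  static <E> E get(Deque<E> stack, int n) {
    Iterator<E> it = stack.descendingIterator(); // tail first = oldest push first
    E e = null;
    for (int i = 0; i <= n; i++) {
      e = it.next();
    }
    return e;
  }

  public static void main(String[] args) {
    Stack<String> s = new Stack<String>();
    Deque<String> d = new ArrayDeque<String>();
    for (String op : new String[] {"TS", "FIL", "SEL"}) {
      s.push(op); // Stack.push appends: s.get(0) stays "TS"
      d.push(op); // ArrayDeque.push is addFirst: the head is the newest element
    }
    System.out.println(s.get(0) + " = " + get(d, 0)); // TS = TS
    System.out.println(s.peek() + " = " + d.peek());  // SEL = SEL
    // A naive swap of stack.get(i) for an iterator or toArray() index would
    // silently reverse the traversal order; hence the helper above.
  }
}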
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
index a0bc19f..2ed0899 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
@@ -28,7 +28,8 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -69,7 +70,7 @@ public AbstractBucketJoinProc() {
   }
 
   @Override
-  abstract public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+  abstract public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
       Object... nodeOutputs) throws SemanticException;
 
   public static List<String> getBucketFilePathsOfPartition(
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
index b57dc77..30fdf5c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
@@ -24,7 +24,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -67,7 +68,7 @@ public AbstractSMBJoinProc() {
   }
 
   @Override
-  abstract public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+  abstract public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
       Object... nodeOutputs) throws SemanticException;
 
   /*
@@ -78,7 +79,7 @@ abstract public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx proc
    * of the sort columns.
    */
   protected boolean canConvertBucketMapJoinToSMBJoin(MapJoinOperator mapJoinOp,
-      Stack<Node> stack,
+      Deque<Node> stack,
       SortBucketJoinProcCtx smbJoinContext,
       Object... nodeOutputs) throws SemanticException {
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AnnotateReduceSinkOutputOperator.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AnnotateReduceSinkOutputOperator.java
index 0b61f4b..c77503c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AnnotateReduceSinkOutputOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AnnotateReduceSinkOutputOperator.java
@@ -18,11 +18,11 @@
 package org.apache.hadoop.hive.ql.optimizer;
 
 import java.util.ArrayList;
+import java.util.Deque;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -58,7 +58,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException {
   private static class ReduceSinkOutputOperatorAnnotator implements NodeProcessor {
 
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       ReduceSinkOperator rs = (ReduceSinkOperator) nd;
       List<Operator<? extends OperatorDesc>> children = rs.getChildOperators();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
index a649fdf..d9918b2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
@@ -21,7 +21,8 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -81,7 +82,7 @@ private NodeProcessor getBucketMapjoinProc(ParseContext pctx) {
   private NodeProcessor getDefaultProc() {
     return new NodeProcessor() {
       @Override
-      public Object process(Node nd, Stack<Node> stack,
+      public Object process(Node nd, Deque<Node> stack,
           NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
         return null;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java
index 1b7c500..7bb9bfe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java
@@ -19,7 +19,8 @@
 
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -38,7 +39,7 @@ public BucketMapjoinProc(ParseContext pGraphContext) {
   }
 
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
       Object... nodeOutputs) throws SemanticException {
     BucketJoinProcCtx context = (BucketJoinProcCtx) procCtx;
     MapJoinOperator mapJoinOperator = (MapJoinOperator) nd;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
index 8f40998..ac39430 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
@@ -23,7 +23,8 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -104,7 +105,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException {
   private NodeProcessor getDefaultProc() {
     return new NodeProcessor() {
       @Override
-      public Object process(Node nd, Stack<Node> stack,
+      public Object process(Node nd, Deque<Node> stack,
           NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
         return null;
       }
@@ -379,7 +380,7 @@ else if (tableTag != columnTableMappings[colNumber]) {
     }
 
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
 
       // We should not use this optimization if sorted dynamic partition optimizer is used,
@@ -410,8 +411,8 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         return null;
       }
 
-      if(stack.get(0) instanceof TableScanOperator) {
-        TableScanOperator tso = ((TableScanOperator)stack.get(0));
+      if(stack.getLast() instanceof TableScanOperator) {
+        TableScanOperator tso = ((TableScanOperator)stack.getLast());
         if(AcidUtils.isAcidTable(tso.getConf().getTableMetadata())) {
           /*ACID tables have complex directory layout and require merging of delta files
            * on read thus we should not try to read bucket files directly*/
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index 00ec03e..59eada9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -27,7 +27,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -100,7 +101,7 @@ private ColumnPrunerProcFactory() {
    */
   public static class ColumnPrunerFilterProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       FilterOperator op = (FilterOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -131,7 +132,7 @@ public static ColumnPrunerFilterProc getFilterProc() {
    */
   public static class ColumnPrunerGroupByProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       GroupByOperator gbOp = (GroupByOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -222,7 +223,7 @@ public static ColumnPrunerGroupByProc getGroupByProc() {
   public static class ColumnPrunerScriptProc implements NodeProcessor {
     @Override
     @SuppressWarnings("unchecked")
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
 
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -277,7 +278,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
   public static class ColumnPrunerLimitProc extends ColumnPrunerDefaultProc {
 
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       super.process(nd, stack, ctx, nodeOutputs);
       List<String> cols = ((ColumnPrunerProcCtx) ctx).getPrunedColLists().get(nd);
@@ -306,7 +307,7 @@ public static ColumnPrunerScriptProc getScriptProc() {
    */
   public static class ColumnPrunerPTFProc extends ColumnPrunerScriptProc {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
 
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -454,7 +455,7 @@ public static ColumnPrunerPTFProc getPTFProc() {
    */
   public static class ColumnPrunerDefaultProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
       cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
@@ -479,7 +480,7 @@ public static ColumnPrunerDefaultProc getDefaultProc() {
    */
   public static class ColumnPrunerTableScanProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       TableScanOperator scanOp = (TableScanOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -567,7 +568,7 @@ public static ColumnPrunerTableScanProc getTableScanProc() {
    */
   public static class ColumnPrunerReduceSinkProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       ReduceSinkOperator op = (ReduceSinkOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -647,7 +648,7 @@ public static ColumnPrunerReduceSinkProc getReduceSinkProc() {
    */
   public static class ColumnPrunerLateralViewJoinProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -698,7 +699,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
    */
   public static class ColumnPrunerLateralViewForwardProc extends ColumnPrunerDefaultProc {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       super.process(nd, stack, ctx, nodeOutputs);
       LateralViewForwardOperator op = (LateralViewForwardOperator) nd;
@@ -740,7 +741,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
    */
   public static class ColumnPrunerSelectProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       SelectOperator op = (SelectOperator) nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -959,7 +960,7 @@ public static ColumnPrunerLateralViewForwardProc getLateralViewForwardProc() {
    */
   public static class ColumnPrunerJoinProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       JoinOperator op = (JoinOperator) nd;
       pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), null,
@@ -982,7 +983,7 @@ public static ColumnPrunerJoinProc getJoinProc() {
    */
   public static class ColumnPrunerMapJoinProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       AbstractMapJoinOperator<MapJoinDesc> op = (AbstractMapJoinOperator<MapJoinDesc>) nd;
       pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), op
@@ -1005,7 +1006,7 @@ public static ColumnPrunerUnionProc getUnionProc() {
    */
   public static class ColumnPrunerUnionProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
       UnionOperator op = (UnionOperator) nd;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
index 517ce31..821998f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -26,7 +26,8 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -1040,7 +1041,7 @@ private static void foldOperator(Operator op,
    */
   public static class ConstantPropagateFilterProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       FilterOperator op = (FilterOperator) nd;
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
@@ -1095,7 +1096,7 @@ public static ConstantPropagateFilterProc getFilterProc() {
    */
   public static class ConstantPropagateGroupByProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       GroupByOperator op = (GroupByOperator) nd;
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
@@ -1145,7 +1146,7 @@ public static ConstantPropagateGroupByProc getGroupByProc() {
   public static class ConstantPropagateDefaultProc implements NodeProcessor {
     @Override
     @SuppressWarnings("unchecked")
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
       Operator op = (Operator) nd;
@@ -1184,7 +1185,7 @@ public static ConstantPropagateDefaultProc getDefaultProc() {
    */
   public static class ConstantPropagateSelectProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       SelectOperator op = (SelectOperator) nd;
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
@@ -1246,7 +1247,7 @@ public static ConstantPropagateSelectProc getSelectProc() {
    */
   public static class ConstantPropagateFileSinkProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       FileSinkOperator op = (FileSinkOperator) nd;
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
@@ -1297,7 +1298,7 @@ public static NodeProcessor getFileSinkProc() {
    */
   public static class ConstantPropagateStopProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       Operator op = (Operator) nd;
       ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
@@ -1320,7 +1321,7 @@ public static NodeProcessor getStopProc() {
    */
   public static class ConstantPropagateReduceSinkProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       ReduceSinkOperator op = (ReduceSinkOperator) nd;
       ReduceSinkDesc rsDesc = op.getConf();
@@ -1424,7 +1425,7 @@ public static NodeProcessor getReduceSinkProc() {
    */
   public static class ConstantPropagateJoinProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       JoinOperator op = (JoinOperator) nd;
       JoinDesc conf = op.getConf();
@@ -1493,7 +1494,7 @@ public static NodeProcessor getJoinProc() {
    */
   public static class ConstantPropagateTableScanProc implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
         throws SemanticException {
       TableScanOperator op = (TableScanOperator) nd;
       TableScanDesc conf = op.getConf();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
index e68618a..b3dcd3e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
@@ -24,7 +24,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -87,7 +88,7 @@
    * might as well do it here.
    */
   public Object
-  process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
+  process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
       throws SemanticException {
 
     OptimizeTezProcContext context = (OptimizeTezProcContext) procCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
index 3085f5d..4726f0d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java
@@ -24,7 +24,8 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -39,6 +40,7 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
+import org.apache.hadoop.hive.ql.lib.Utils;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc;
@@ -79,15 +81,15 @@
      * found
      */
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       ExprNodeDynamicListDesc desc = (ExprNodeDynamicListDesc) nd;
       DynamicPartitionPrunerContext context = (DynamicPartitionPrunerContext) procCtx;
 
       // Rule is searching for dynamic pruning expr. There's at least an IN
       // expression wrapping it.
-      ExprNodeDesc parent = (ExprNodeDesc) stack.get(stack.size() - 2);
-      ExprNodeDesc grandParent = stack.size() >= 3 ? (ExprNodeDesc) stack.get(stack.size() - 3) : null;
+      ExprNodeDesc parent = (ExprNodeDesc) Utils.get(stack, stack.size() - 2);
+      ExprNodeDesc grandParent = stack.size() >= 3 ? (ExprNodeDesc) Utils.get(stack, stack.size() - 3) : null;
 
       context.addDynamicList(desc, parent, grandParent, (ReduceSinkOperator) desc.getSource());
 
@@ -126,7 +128,7 @@ public void addDynamicList(ExprNodeDynamicListDesc desc, ExprNodeDesc parent,
   }
 
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
       throws SemanticException {
     ParseContext parseContext;
     if (procCtx instanceof OptimizeTezProcContext) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
index b853a06..ec9c8d9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
@@ -22,7 +22,8 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -73,7 +74,7 @@ public FixedBucketPruningOptimizer(boolean compat) {
 
   public class NoopWalker implements NodeProcessor {
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       // do nothing
       return null;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
index a231543..8614ab7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
@@ -23,7 +23,8 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,7 +64,7 @@ public GenMRFileSink1() {
    * context
    */
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
       Object... nodeOutputs) throws SemanticException {
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
     ParseContext parseCtx = ctx.getParseCtx();
@@ -167,7 +168,7 @@ private void processLinkedFileDesc(GenMRProcContext ctx,
    * @return the final file name to which the FileSinkOperator should store.
    * @throws SemanticException
    */
-  private Path processFS(FileSinkOperator fsOp, Stack<Node> stack,
+  private Path processFS(FileSinkOperator fsOp, Deque<Node> stack,
      NodeProcessorCtx opProcCtx, boolean chDir) throws SemanticException {
 
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
index 4f3eb06..6be362b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
@@ -19,12 +19,13 @@
 package org.apache.hadoop.hive.ql.optimizer;
 
 import java.util.Map;
-import java.util.Stack;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.lib.Utils;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -45,13 +46,13 @@ public GenMROperator() {
    * @param procCtx
    *          context
    */
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
       Object... nodeOutputs) throws SemanticException {
 
     GenMRProcContext ctx = (GenMRProcContext) procCtx;
 
     Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
         .getMapCurrCtx();
-    GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
+    GenMapRedCtx mapredCtx = mapCurrCtx.get(Utils.get(stack, stack.size() - 2));
     mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, new GenMapRedCtx(
         mapredCtx.getCurrTask(), mapredCtx.getCurrAliasId()));
     return true;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
index bc8fed4..d78ffd5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
@@ -20,7 +20,7 @@
 
 import java.io.Serializable;
 import java.util.Map;
-import java.util.Stack;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.lib.Utils;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
@@ -53,14 +54,14 @@ public GenMRRedSink1() {
    * @param opProcCtx
    *          context
    */
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
       Object... nodeOutputs) throws SemanticException {
     ReduceSinkOperator op = (ReduceSinkOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
 
     Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
         .getMapCurrCtx();
-    GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
+    GenMapRedCtx mapredCtx = mapCurrCtx.get(Utils.get(stack, stack.size() - 2));
     Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
     MapredWork currPlan = (MapredWork) currTask.getWork();
     String currAliasId = mapredCtx.getCurrAliasId();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
index 6c34bfe..19e4df9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
@@ -20,7 +20,8 @@
 
 import java.io.Serializable;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -48,7 +49,7 @@ public GenMRRedSink2() {
    * @param opProcCtx
    *          context
    */
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
       Object... nodeOutputs) throws SemanticException {
     ReduceSinkOperator op = (ReduceSinkOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
index e4f6292..d01f445 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
@@ -21,7 +21,7 @@
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Stack;
+import java.util.Deque;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -52,7 +52,7 @@ public GenMRRedSink3() {
    * @param opProcCtx
    *          context
    */
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
       Object... nodeOutputs) throws SemanticException {
     ReduceSinkOperator op = (ReduceSinkOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 9297a0b..b253232 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -23,7 +23,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +67,7 @@ public GenMRTableScan1() {
    *          context
    */
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
      Object... nodeOutputs) throws SemanticException {
     TableScanOperator op = (TableScanOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
index 5102d19..0426f88 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
@@ -20,7 +20,8 @@
 
 import java.io.Serializable;
 import java.util.Map;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.Context;
@@ -58,7 +59,7 @@ public GenMRUnion1() {
    * @return
    * @throws SemanticException
    */
-  private Object processMapOnlyUnion(UnionOperator union, Stack<Node> stack,
+  private Object processMapOnlyUnion(UnionOperator union, Deque<Node> stack,
       GenMRProcContext ctx, UnionProcContext uCtx) throws SemanticException {
 
     // merge currTask from multiple topOps
@@ -164,7 +165,7 @@ private void processSubQueryUnionCreateIntermediate(
    * @throws SemanticException
    */
   private void processSubQueryUnionMerge(GenMRProcContext ctx,
-      GenMRUnionCtx uCtxTask, UnionOperator union, Stack<Node> stack)
+      GenMRUnionCtx uCtxTask, UnionOperator union, Deque<Node> stack)
       throws SemanticException {
     // The current plan can be thrown away after being merged with the union
     // plan
@@ -190,7 +191,7 @@ private void processSubQueryUnionMerge(GenMRProcContext ctx,
    *          context
    */
   @Override
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
+  public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx opProcCtx,
      Object... nodeOutputs) throws SemanticException {
     UnionOperator union = (UnionOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
index 3233157..8b92fd0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
@@ -26,8 +26,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
+import java.util.ArrayDeque;
+import java.util.Deque;
 
+import org.apache.hadoop.hive.ql.lib.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -119,7 +121,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException {
   private NodeProcessor getDefaultProc() {
     return new NodeProcessor() {
       @Override
-      public Object process(Node nd, Stack<Node> stack,
+      public Object process(Node nd, Deque<Node> stack,
           NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
         return null;
       }
@@ -169,7 +171,7 @@ protected boolean checkGroupByOperatorProcessed(
     }
 
     protected void processGroupBy(GroupByOptimizerContext ctx,
-        Stack<Node> stack,
+        Deque<Node> stack,
         GroupByOperator groupByOp,
         int depth) throws SemanticException {
       HiveConf hiveConf = ctx.getConf();
@@ -253,10 +255,10 @@ else if (setBucketGroup) {
     }
 
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       // GBY,RS,GBY... (top to bottom)
-      GroupByOperator groupByOp = (GroupByOperator) stack.get(stack.size() - 3);
+      GroupByOperator groupByOp = (GroupByOperator) Utils.get(stack, stack.size() - 3);
 
       GroupByOptimizerContext ctx = (GroupByOptimizerContext) procCtx;
 
@@ -268,7 +270,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
 
     // Should this group by be converted to a map-side group by, because the grouping keys for
     // the base table for the group by matches the skewed keys
-    protected GroupByOptimizerSortMatch checkSortGroupBy(Stack<Node> stack,
+    protected GroupByOptimizerSortMatch checkSortGroupBy(Deque<Node> stack,
         GroupByOperator groupByOp)
         throws SemanticException {
 
@@ -298,7 +300,7 @@ protected GroupByOptimizerSortMatch checkSortGroupBy(Stack<Node> stack,
       // currOp now points to the top-most tablescan operator
       TableScanOperator tableScanOp = (TableScanOperator) currOp;
       int stackPos = 0;
-      assert stack.get(0) == tableScanOp;
+      assert stack.peekLast() == tableScanOp;
 
       // Create a mapping from the group by columns to the table columns
       Map<String, String> tableColsMapping = new HashMap<String, String>();
@@ -311,7 +313,7 @@ protected GroupByOptimizerSortMatch checkSortGroupBy(Stack<Node> stack,
       while (currOp != groupByOp) {
         Operator<? extends OperatorDesc> processOp = currOp;
         Set<String> newConstantCols = new HashSet<String>();
-        currOp = (Operator<? extends OperatorDesc>) (stack.get(++stackPos));
+        currOp = (Operator<? extends OperatorDesc>) (Utils.get(stack, ++stackPos));
 
         // Filters don't change the column names - so, no need to do anything for them
         if (processOp instanceof SelectOperator) {
@@ -586,10 +588,10 @@ public SortGroupBySkewProcessor(ParseContext pGraphContext) {
     }
 
     @Override
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+    public Object process(Node nd, Deque<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       // GBY,RS,GBY,RS,GBY... (top to bottom)
-      GroupByOperator groupByOp = (GroupByOperator) stack.get(stack.size() - 5);
+      GroupByOperator groupByOp = (GroupByOperator) Utils.get(stack, stack.size() - 5);
 
       GroupByOptimizerContext ctx = (GroupByOptimizerContext) procCtx;
 
       if (!checkGroupByOperatorProcessed(ctx, groupByOp)) {
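Note on positional reads such as the ones in GroupByOptimizer above: Utils.get advances an iterator from the tail on every call, so each lookup is O(n), and loops like the one in checkSortGroupBy become O(n^2) where the old Vector-backed Stack.get(i) was O(1). For the short operator stacks these walkers carry this is normally negligible, but a caller that scans a long stack could snapshot it once instead. A hypothetical helper, not part of the patch (names are illustrative):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

public class BottomUpSnapshot {
  // One O(n) snapshot instead of O(n) iterator work per lookup.
  // descendingIterator() visits the tail first, which corresponds to the old
  // Stack index 0 (the bottom), so arr[i] matches the old stack.get(i).
  static Object[] bottomUpArray(Deque<?> stack) {
    Object[] arr = new Object[stack.size()];
    Iterator<?> it = stack.descendingIterator();
    for (int i = 0; it.hasNext(); i++) {
      arr[i] = it.next();
    }
    return arr;
  }

  public static void main(String[] args) {
    Deque<String> d = new ArrayDeque<String>();
    d.push("TS");
    d.push("RS");
    d.push("GBY");
    Object[] arr = bottomUpArray(d);
    System.out.println(arr[0] + " " + arr[2]); // TS GBY, like Stack.get(0)/get(2)
  }
}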
nodeOutputs) throws SemanticException { ReduceSinkOperator rs = null; for (int i = stack.size() - 2 ; i >= 0; i--) { - Operator operator = (Operator) stack.get(i); + Operator operator = (Operator) Utils.get(stack, i); if (operator.getNumChild() != 1) { return false; // multi-GBY single-RS (TODO) } @@ -153,7 +147,7 @@ public Object process(Node nd, Stack stack, private static class TopNPropagator implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ReduceSinkOperator cRS = (ReduceSinkOperator) nd; if (cRS.getConf().getTopN() == -1) { @@ -161,8 +155,9 @@ public Object process(Node nd, Stack stack, return false; } ReduceSinkOperator pRS = null; + Node[] array = stack.toArray(new Node[stack.size()]); for (int i = stack.size() - 2 ; i >= 0; i--) { - Operator operator = (Operator) stack.get(i); + Operator operator = (Operator) array[array.length - 1 - i]; // ArrayDeque.toArray() is top-first; mirror the index to keep Stack order if (operator.getNumChild() != 1) { return false; // multi-GBY single-RS (TODO) } } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java index 867a1f3..4ce580a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java @@ -21,7 +21,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext; @@ -46,12 +47,12 @@ public final class MapJoinFactory { public static int getPositionParent(AbstractMapJoinOperator op, - Stack stack) { + Deque stack) { int pos = 0; int size = stack.size(); - assert size >= 2 && stack.get(size - 1) == op; + assert size >= 2 && Utils.get(stack, size - 1) == op; Operator parent = - (Operator) stack.get(size - 2); + (Operator) Utils.get(stack, size - 2); List> parOp = op.getParentOperators(); pos = parOp.indexOf(parent); assert pos < parOp.size(); @@ -188,7 +189,7 @@ private static void joinMapJoinPlan(Task oldTask, * the map join operator is enhanced to contain the bucketing info. when it is encountered. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { AbstractMapJoinOperator mapJoin = (AbstractMapJoinOperator) nd; GenMRProcContext ctx = (GenMRProcContext) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java index b2893e7..a1b16a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java @@ -27,7 +27,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -742,7 +743,7 @@ public ParseContext transform(ParseContext pactx) throws SemanticException { * Store the current mapjoin in the context. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { MapJoinWalkerCtx ctx = (MapJoinWalkerCtx) procCtx; @@ -852,7 +853,7 @@ private static void addRejectMapJoinToCtx(MapJoinWalkerCtx ctx, * Store the current mapjoin in a list of mapjoins followed by a filesink. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { MapJoinWalkerCtx ctx = (MapJoinWalkerCtx) procCtx; @@ -879,7 +880,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, * Store the mapjoin in a rejected list. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { MapJoinWalkerCtx ctx = (MapJoinWalkerCtx) procCtx; AbstractMapJoinOperator mapJoin = ctx.getCurrMapJoinOp(); @@ -898,7 +899,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, * Nothing to do. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/MergeJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/MergeJoinProc.java index 5b73866..5c5f3c9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MergeJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MergeJoinProc.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.optimizer; -import java.util.Stack; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.parse.GenTezProcContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.BaseWork; @@ -40,11 +41,11 @@ public class MergeJoinProc implements NodeProcessor { @Override public Object - process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) + process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { GenTezProcContext context = (GenTezProcContext) procCtx; CommonMergeJoinOperator mergeJoinOp = (CommonMergeJoinOperator) nd; - if (stack.size() < 2 || !(stack.get(stack.size() - 2) instanceof DummyStoreOperator)) { + if (stack.size() < 2 || !(Utils.get(stack, stack.size() - 2) instanceof DummyStoreOperator)) { context.currentMergeJoinOperator = mergeJoinOp; return null; } @@ -52,7 +53,7 @@ TezWork tezWork = context.currentTask.getWork(); @SuppressWarnings("unchecked") Operator parentOp = - (Operator) ((stack.get(stack.size() - 2))); + (Operator) Utils.get(stack, stack.size() - 2); // Guaranteed to be just 1 because each DummyStoreOperator can be part of only one work. BaseWork parentWork = context.childToWorkMap.get(parentOp).get(0); @@ -88,7 +89,7 @@ tezWork.remove(parentWork); - DummyStoreOperator dummyOp = (DummyStoreOperator) (stack.get(stack.size() - 2)); + DummyStoreOperator dummyOp = (DummyStoreOperator) Utils.get(stack, stack.size() - 2); parentWork.setTag(mergeJoinOp.getTagForOperator(dummyOp)); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java index de4d0e4..488d35d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java @@ -22,11 +22,13 @@ import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -42,6 +44,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; @@ -82,10 +85,10 @@ public SelectDedup (ParseContext pctx) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SelectOperator cSEL = (SelectOperator) nd; - SelectOperator pSEL = (SelectOperator) stack.get(stack.size() - 2); + SelectOperator pSEL = (SelectOperator) Utils.get(stack, stack.size() - 2); if (pSEL.getNumChild() > 1) { return null; // possible if all children have same expressions, but not likely. } @@ -214,10 +217,10 @@ private void fixContextReferences(SelectOperator cSEL, SelectOperator pSEL) { private class FilterDedup implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator cFIL = (FilterOperator) nd; - FilterOperator pFIL = (FilterOperator) stack.get(stack.size() - 2); + FilterOperator pFIL = (FilterOperator) Utils.get(stack, stack.size() - 2); // Sampling predicates can be merged with predicates from children because PPD/PPR is // already applied. But to clarify the intention of sampling, just skips merging. 
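Review note: throughout these hunks, indexed reads like `stack.get(i)` become `Utils.get(stack, i)`, but the new `org.apache.hadoop.hive.ql.lib.Utils` helper itself is not among the hunks shown. Below is a minimal sketch of the contract the call sites assume (the class name and body are illustrative, not the patch's actual code): index 0 is the bottom of the traversal stack, exactly as with `java.util.Stack`, and since `ArrayDeque.push()` inserts at the head, element `i` lives `size - 1 - i` steps from the head.

```java
import java.util.Deque;
import java.util.Iterator;

// Stand-in for org.apache.hadoop.hive.ql.lib.Utils#get as used by this patch.
public final class UtilsSketch {
  private UtilsSketch() {
  }

  // Vector-style indexing over a Deque used as a stack: index 0 is the bottom
  // (the deque tail), index size - 1 is the top (the deque head).
  public static <T> T get(Deque<T> stack, int index) {
    if (index < 0 || index >= stack.size()) {
      throw new IndexOutOfBoundsException("index: " + index + ", size: " + stack.size());
    }
    Iterator<T> it = stack.iterator(); // iterates head (top of stack) first
    for (int i = stack.size() - 1; i > index; i--) {
      it.next(); // skip the nodes above the requested position
    }
    return it.next();
  }
}
```

Under this contract, `Utils.get(stack, stack.size() - 2)` returns the direct parent of the node on top, matching the old `stack.get(stack.size() - 2)`. Each call walks the deque, so invoking it once per loop iteration (as `TopNReducer` does above) is quadratic in stack depth; `TopNPropagator` sidesteps that by copying the deque into an array once.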
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/PartitionColumnsSeparator.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/PartitionColumnsSeparator.java index beb1884..2658d6f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PartitionColumnsSeparator.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PartitionColumnsSeparator.java @@ -26,7 +26,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -90,7 +91,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { private class StructInTransformer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); @@ -339,7 +340,7 @@ private String getTableAlias(ExprNodeDesc en) { * containing only partition columns. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeGenericFuncDesc fd = getInExprNode((ExprNodeDesc)nd); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java index 44972bf..b170628 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java @@ -22,7 +22,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.calcite.util.Pair; import org.slf4j.Logger; @@ -99,7 +100,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { private class FilterTransformer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); @@ -138,7 +139,7 @@ private ExprNodeDesc generateInClause(ExprNodeDesc predicate) throws SemanticExc private class OrExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java index 306e714..0c08e77 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.lib.Node; @@ -47,7 +48,7 @@ public static class GenericFuncExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeDesc newfd = null; @@ -111,7 +112,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class FieldExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeFieldDesc fnd = (ExprNodeFieldDesc) nd; @@ -148,7 +149,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static abstract class ColumnExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeDesc newcd = null; @@ -177,7 +178,7 @@ protected abstract ExprNodeDesc processColumnDesc(NodeProcessorCtx procCtx, public static class DefaultExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { if (nd instanceof ExprNodeConstantDesc) { return ((ExprNodeConstantDesc) nd).clone(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java index 51464e5..8a44e30 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java @@ -19,7 +19,8 @@ import java.util.HashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -51,7 +52,7 @@ public static abstract class FilterPruner implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { FilterOperator fop = (FilterOperator) nd; FilterOperator fop2 = null; @@ -175,7 +176,7 @@ protected void addPruningPred(Map> public static class DefaultPruner implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Nothing needs to be done. return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java index 3a6baca..be7447c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java @@ -23,10 +23,12 @@ import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -40,6 +42,7 @@ import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.parse.GenTezProcContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.BaseWork; @@ -77,7 +80,7 @@ * or reduce work. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... nodeOutputs) throws SemanticException { GenTezProcContext context = (GenTezProcContext) procContext; MapJoinOperator mapJoinOp = (MapJoinOperator)nd; @@ -89,11 +92,11 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, } boolean isBigTable = stack.size() < 2 - || !(stack.get(stack.size() - 2) instanceof ReduceSinkOperator); + || !(Utils.get(stack, stack.size() - 2) instanceof ReduceSinkOperator); ReduceSinkOperator parentRS = null; if (!isBigTable) { - parentRS = (ReduceSinkOperator)stack.get(stack.size() - 2); + parentRS = (ReduceSinkOperator)Utils.get(stack, stack.size() - 2); // For dynamic partitioned hash join, the big table will also be coming from a ReduceSinkOperator // Check for this condition. 
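The `peekLast()` and `Utils.get(stack, stack.size() - 2)` rewrites in `MergeJoinProc` and `ReduceSinkMapJoinProc` both lean on the same fact: the walker pushes onto the head of the deque, so the first node pushed (the root of the walk) sits at the tail. A small standalone demo of that ordering (not patch code):

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Stack;

public class StackOrderDemo {
  public static void main(String[] args) {
    Stack<String> legacy = new Stack<>();
    Deque<String> deque = new ArrayDeque<>();
    for (String op : new String[] {"TS", "RS", "JOIN"}) {
      legacy.push(op); // Stack grows at the end of the backing Vector
      deque.push(op);  // ArrayDeque.push() == addFirst()
    }
    System.out.println(legacy.get(0));    // TS -- bottom, old-style index 0
    System.out.println(deque.peekLast()); // TS -- same element on the deque
    System.out.println(deque.peek());     // JOIN -- top of the stack
    System.out.println(legacy);           // [TS, RS, JOIN] (bottom first)
    System.out.println(deque);            // [JOIN, RS, TS] (top first)
  }
}
```

Because iteration order is reversed relative to `Stack`, any enhanced-for loop left iterating the stack directly (for example `for (Node node : stack)` in `RewriteCanApplyProcFactory` further down) now visits operators top-down rather than bottom-up; harmless where the loop body is order-insensitive, but worth checking at each such call site.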
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/RedundantDynamicPruningConditionsRemoval.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/RedundantDynamicPruningConditionsRemoval.java index b8a60f9..d7fa9bc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/RedundantDynamicPruningConditionsRemoval.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/RedundantDynamicPruningConditionsRemoval.java @@ -18,10 +18,12 @@ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.calcite.util.Pair; import org.apache.hadoop.hive.conf.HiveConf; @@ -37,6 +39,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -95,12 +98,12 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { private class FilterTransformer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filter = (FilterOperator) nd; FilterDesc desc = filter.getConf(); - TableScanOperator ts = (TableScanOperator) stack.get(stack.size() - 2); + TableScanOperator ts = (TableScanOperator) Utils.get(stack, stack.size() - 2); // collect CollectContext removalContext = new CollectContext(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java index d8b76e1..305e35b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +43,7 @@ static final private Logger LOG = LoggerFactory.getLogger(RemoveDynamicPruningBySize.class.getName()); @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... 
nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java index dd679db..c9649c0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java @@ -23,10 +23,13 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; +import org.apache.hadoop.hive.ql.lib.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; @@ -131,7 +134,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { public static class FilterPPR implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filOp = (FilterOperator) nd; FilterDesc filOpDesc = filOp.getConf(); @@ -141,10 +144,10 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, return null; } - assert (stack.size() == 3 && stack.get(1) instanceof FilterOperator) || + assert (stack.size() == 3 && Utils.get(stack, 1) instanceof FilterOperator) || stack.size() == 2; - TableScanOperator tsOp = (TableScanOperator) stack.get(0); + TableScanOperator tsOp = (TableScanOperator) Utils.get(stack, 0); ((SamplePrunerCtx) procCtx).getOpToSamplePruner().put(tsOp, sampleDescr); return null; } @@ -161,7 +164,7 @@ public static NodeProcessor getFilterProc() { public static class DefaultPPR implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Nothing needs to be done. return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java index 60a8604..ac68977 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java @@ -20,7 +20,8 @@ import java.util.Collection; import java.util.EnumSet; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,7 +53,7 @@ @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... 
nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java index fbcb779..45054cb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java @@ -19,10 +19,12 @@ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -31,15 +33,7 @@ import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.SelectOperator; -import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; -import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; -import org.apache.hadoop.hive.ql.lib.Dispatcher; -import org.apache.hadoop.hive.ql.lib.GraphWalker; -import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.Rule; -import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.*; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.AggregationDesc; @@ -86,16 +80,16 @@ public SingleGBYProcessor(ParseContext pctx) { this.pctx = pctx; } - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FileSinkOperator FS = (FileSinkOperator) nd; - int shift = stack.get(stack.size() - 2) instanceof SelectOperator ? 0 : 1; - GroupByOperator cGBY = (GroupByOperator) stack.get(stack.size() - 3 + shift); - ReduceSinkOperator RS = (ReduceSinkOperator) stack.get(stack.size() - 4 + shift); + int shift = Utils.get(stack, stack.size() - 2) instanceof SelectOperator ? 
0 : 1; + GroupByOperator cGBY = (GroupByOperator) Utils.get(stack, stack.size() - 3 + shift); + ReduceSinkOperator RS = (ReduceSinkOperator) Utils.get(stack, stack.size() - 4 + shift); if (RS.getConf().getNumReducers() != 1 || !RS.getConf().getKeyCols().isEmpty()) { return null; } - GroupByOperator pGBY = (GroupByOperator) stack.get(stack.size() - 5 + shift); + GroupByOperator pGBY = (GroupByOperator) Utils.get(stack, stack.size() - 5 + shift); Path fileName = FS.getConf().getFinalDirName(); TableDesc tsDesc = createIntermediateFS(pGBY, fileName); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java index 5e30910..f6e7870 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java @@ -26,7 +26,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -88,7 +89,7 @@ public SkewJoinProc(ParseContext parseContext) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // We should be having a tree which looks like this // TS -> * -> RS - diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java index 926386b..e5dd3cb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; @@ -117,7 +118,7 @@ public SortedDynamicPartitionProc(ParseContext pCtx) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // introduce RS and EX before FS. If the operator tree already contains diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java index 7a4f22a..d14cede 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java @@ -76,7 +76,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; /** * Introduces a RS before FS to partition data by configuration specified @@ -118,7 +119,7 @@ public SortedDynamicPartitionProc(ParseContext pCtx) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { // introduce RS and EX before FS diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java index 6aa5335..eed7a9d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java @@ -19,10 +19,13 @@ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; +import org.apache.hadoop.hive.ql.lib.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; @@ -30,15 +33,6 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; -import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; -import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; -import org.apache.hadoop.hive.ql.lib.Dispatcher; -import org.apache.hadoop.hive.ql.lib.GraphWalker; -import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.Rule; -import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; @@ -122,7 +116,7 @@ private NodeProcessor getSortedMergeJoinProc(ParseContext pctx) { private NodeProcessor getDefaultProc() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; @@ -135,13 +129,13 @@ public Object process(Node nd, Stack stack, private NodeProcessor getCheckCandidateJoin() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SortBucketJoinProcCtx smbJoinContext = (SortBucketJoinProcCtx)procCtx; JoinOperator joinOperator = (JoinOperator)nd; int size = stack.size(); - if (!(stack.get(size-1) instanceof JoinOperator) || - !(stack.get(size-2) instanceof ReduceSinkOperator)) { + if (!(Utils.get(stack, size-1) instanceof JoinOperator) || + !(Utils.get(stack, size-2) instanceof ReduceSinkOperator)) { smbJoinContext.getRejectedJoinOps().add(joinOperator); return null; } @@ -149,7 +143,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // If any operator in the stack does not support a auto-conversion, this join should // not be converted. 
for (int pos = size -3; pos >= 0; pos--) { - Operator op = (Operator)stack.get(pos); + Operator op = (Operator)Utils.get(stack, pos); if (!op.supportAutomaticSortMergeJoin()) { smbJoinContext.getRejectedJoinOps().add(joinOperator); return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java index f6ca039..9d3af34 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -39,7 +40,7 @@ public SortedMergeBucketMapjoinProc() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { if (nd instanceof SMBMapJoinOperator) { return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java index d090598..ab393a9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.lib.Node; @@ -37,7 +38,7 @@ public SortedMergeJoinProc() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { JoinOperator joinOp = (JoinOperator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java index 26a1088..d35413a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.OperatorUtils; import org.slf4j.Logger; @@ -43,7 +44,7 @@ static final private Logger LOG = LoggerFactory.getLogger(SparkRemoveDynamicPruningBySize.class.getName()); @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... 
nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java index 32d1de1..42b3b51 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java @@ -22,12 +22,14 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -55,6 +57,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -236,7 +239,7 @@ private GbyKeyType getGbyKeyType(GroupByOperator gbyOp) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // 1. Do few checks to determine eligibility of optimization @@ -261,7 +264,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, boolean isOptimized = false; try { - TableScanOperator tsOp = (TableScanOperator) stack.get(0); + TableScanOperator tsOp = (TableScanOperator) stack.peekLast(); // bottom of the stack, i.e. the old stack.get(0) if (tsOp.getNumParent() > 0) { // looks like a subq plan. return null; @@ -281,7 +284,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (rowCnt == null) { return null; } - SelectOperator pselOp = (SelectOperator)stack.get(1); + SelectOperator pselOp = (SelectOperator)Utils.get(stack, 1); for(ExprNodeDesc desc : pselOp.getConf().getColList()) { if (!((desc instanceof ExprNodeColumnDesc) || (desc instanceof ExprNodeConstantDesc))) { // Probably an expression, cant handle that @@ -291,7 +294,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Map exprMap = pselOp.getColumnExprMap(); // Since we have done an exact match on TS-SEL-GBY-RS-GBY-(SEL)-FS // we need not to do any instanceof checks for following. 
- GroupByOperator pgbyOp = (GroupByOperator)stack.get(2); + GroupByOperator pgbyOp = (GroupByOperator)Utils.get(stack, 2); if (getGbyKeyType(pgbyOp) == GbyKeyType.OTHER) { return null; } @@ -300,13 +303,13 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, else if (getGbyKeyType(pgbyOp) == GbyKeyType.CONSTANT && rowCnt == 0) { return null; } - ReduceSinkOperator rsOp = (ReduceSinkOperator)stack.get(3); + ReduceSinkOperator rsOp = (ReduceSinkOperator)Utils.get(stack, 3); if (rsOp.getConf().getDistinctColumnIndices().size() > 0) { // we can't handle distinct return null; } - GroupByOperator cgbyOp = (GroupByOperator)stack.get(4); + GroupByOperator cgbyOp = (GroupByOperator)Utils.get(stack, 4); if (getGbyKeyType(cgbyOp) == GbyKeyType.OTHER) { return null; } @@ -315,7 +318,7 @@ else if (getGbyKeyType(pgbyOp) == GbyKeyType.CONSTANT && rowCnt == 0) { else if (getGbyKeyType(cgbyOp) == GbyKeyType.CONSTANT && rowCnt == 0) { return null; } - Operator last = (Operator) stack.get(5); + Operator last = (Operator) Utils.get(stack, 5); SelectOperator cselOp = null; Map posToConstant = new HashMap<>(); if (last instanceof SelectOperator) { @@ -334,7 +337,7 @@ else if (getGbyKeyType(cgbyOp) == GbyKeyType.CONSTANT && rowCnt == 0) { } } } - last = (Operator) stack.get(6); + last = (Operator) Utils.get(stack, 6); } FileSinkOperator fsOp = (FileSinkOperator)last; if (fsOp.getNumChild() > 0) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java index b5f4ca3..94557ca 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java @@ -23,7 +23,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -81,7 +81,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { private class JoinAnnotate implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { JoinOperator joinOp = (JoinOperator) nd; @@ -129,7 +129,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, private class TableScanAnnotate implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { TableScanOperator tableScanOp = (TableScanOperator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinCondTypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinCondTypeCheckProcFactory.java index cf665ee..2a7fd0a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinCondTypeCheckProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/JoinCondTypeCheckProcFactory.java @@ -18,14 +18,17 @@ package org.apache.hadoop.hive.ql.optimizer.calcite.translator; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; @@ -64,7 +67,7 @@ public static class JoinCondColumnExprProcessor extends ColumnExprProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { JoinTypeCheckCtx ctx = (JoinTypeCheckCtx) procCtx; @@ -73,7 +76,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } ASTNode expr = (ASTNode) nd; - ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null; + ASTNode parent = stack.size() > 1 ? (ASTNode) Utils.get(stack, stack.size() - 2) : null; if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) { ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java index e1927e9..25d9631 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java @@ -28,7 +28,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -569,7 +570,7 @@ private boolean sameOrder(String order1, String order2) { return reduceSinkOperators; } - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { CorrelationNodeProcCtx corrCtx = (CorrelationNodeProcCtx) ctx; ReduceSinkOperator op = (ReduceSinkOperator) nd; @@ -631,7 +632,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, private NodeProcessor getDefaultProc() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) throws SemanticException { Operator op = (Operator) nd; LOG.info("Walk to operator " + op.getIdentifier() + " " diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java index d53efbf..1beaf13 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java @@ -22,11 +22,11 @@ import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVECONVERTJOINNOCONDITIONALTASK; import java.util.ArrayList; +import java.util.Deque; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Stack; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -146,7 +146,7 @@ public static NodeProcessor getDefaultProc() { */ static class DefaultProc implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } @@ -155,7 +155,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public abstract static class AbsctractReducerReducerProc implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ReduceSinkDeduplicateProcCtx dedupCtx = (ReduceSinkDeduplicateProcCtx) procCtx; if (dedupCtx.hasBeenRemoved((Operator) nd)) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java index 55b85d9..b7813a9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java @@ -23,7 +23,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -203,7 +204,7 @@ void populateRewriteVars(TableScanOperator topOp) private NodeProcessor getDefaultProc() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java index 04aa299..285f0ba 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java @@ -35,7 +35,8 @@ import java.util.ArrayList; import java.util.List; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; /** * Factory of methods used by {@link RewriteGBUsingIndex} @@ -51,7 +52,7 @@ public static CheckTableScanProc canApplyOnTableScanOperator(TableScanOperator t public CheckTableScanProc() { } - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { RewriteCanApplyCtx canApplyCtx = (RewriteCanApplyCtx) ctx; for (Node node : stack) { @@ -100,7 +101,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object.. return null; } else { ExprNodeDesc expr = ExprNodeDescUtils.backtrack(para.get(0), operator, - (Operator) stack.get(0)); + (Operator) stack.peekLast()); if (!(expr instanceof ExprNodeColumnDesc)) { canApplyCtx.setAggParameterException(true); return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java index 09ef490..0bf2050 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -71,7 +72,7 @@ public static class ColumnExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeColumnDesc cd = (ExprNodeColumnDesc) nd; @@ -103,7 +104,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class GenericExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { assert (nd instanceof ExprNodeGenericFuncDesc || nd instanceof ExprNodeFieldDesc); @@ -140,7 +141,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class DefaultExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { assert (nd instanceof ExprNodeConstantDesc); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java index d95b45b..6c555f9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Table; @@ -78,7 +79,7 @@ * @return Operator The parent operator in the current path. */ @SuppressWarnings("unchecked") - protected static Operator getParent(Stack stack) { + protected static Operator getParent(Deque stack) { return (Operator)Utils.getNthAncestor(stack, 1); } @@ -88,7 +89,7 @@ public static class TransformLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { // LineageCTx @@ -147,7 +148,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class TableScanLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // LineageCtx @@ -203,7 +204,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, */ public static class JoinLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never @@ -304,7 +305,7 @@ private Predicate getPredicate(JoinOperator jop, LineageCtx lctx) { */ public static class LateralViewJoinLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never @@ -349,7 +350,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, */ public static class SelectLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LineageCtx lctx = (LineageCtx)procCtx; @@ -402,7 +403,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, */ public static class GroupByLineage extends DefaultLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LineageCtx lctx = (LineageCtx)procCtx; @@ -526,7 +527,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. @@ -559,7 +560,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class ReduceSinkLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. @@ -628,7 +629,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class FilterLineage implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. 
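`getParent` above delegates to `Utils.getNthAncestor`, which predates this patch but must likewise move from `Stack` to `Deque` (that hunk is not shown here). A sketch of the assumed contract, inferred from `getNthAncestor(stack, 1)` yielding the parent: the deque head is the node currently being processed (ancestor 0), and ancestor `n` is the node `n` entries below it.

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

public class NthAncestorSketch {
  // Illustrative only; the real signature in org.apache.hadoop.hive.ql.lib.Utils may differ.
  public static <T> T getNthAncestor(Deque<T> stack, int n) {
    assert stack.size() > n;
    Iterator<T> it = stack.iterator(); // head first: current node, parent, ...
    T node = it.next();                // ancestor 0, the current node
    for (int i = 0; i < n; i++) {
      node = it.next();                // walk up one ancestor per step
    }
    return node;
  }

  public static void main(String[] args) {
    Deque<String> stack = new ArrayDeque<>();
    stack.push("TS");  // pushed first: the deepest ancestor
    stack.push("FIL");
    stack.push("SEL"); // current node, top of stack
    System.out.println(getNthAncestor(stack, 1)); // FIL, the direct parent
  }
}
```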
@@ -670,7 +671,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/metainfo/annotation/OpTraitsRulesProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/metainfo/annotation/OpTraitsRulesProcFactory.java index 875ee9d..b0671a8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/metainfo/annotation/OpTraitsRulesProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/metainfo/annotation/OpTraitsRulesProcFactory.java @@ -21,7 +21,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map.Entry; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -71,7 +72,7 @@ public static class DefaultRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") Operator op = (Operator)nd; @@ -88,7 +89,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class ReduceSinkRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ReduceSinkOperator rs = (ReduceSinkOperator)nd; @@ -162,7 +163,7 @@ public boolean checkBucketedTable(Table tbl, ParseContext pGraphContext, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TableScanOperator ts = (TableScanOperator)nd; AnnotateOpTraitsProcCtx opTraitsCtx = (AnnotateOpTraitsProcCtx)procCtx; @@ -201,7 +202,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class GroupByRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { GroupByOperator gbyOp = (GroupByOperator)nd; List gbyKeys = new ArrayList(); @@ -253,7 +254,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SelectOperator selOp = (SelectOperator) nd; List> parentBucketColNames = @@ -288,7 +289,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class JoinRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { JoinOperator joinOp = (JoinOperator) nd; List> bucketColsList = new ArrayList>(); @@ -363,7 +364,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class MultiParentRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") Operator operator = (Operator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java index 461dbe5..fe5f5e9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java @@ -25,7 +25,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; @@ -209,7 +210,7 @@ public NodeInfoWrapper(WalkState state, Boolean[] resultVector, ExprNodeDesc out public static class ColumnExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeColumnDesc cd = (ExprNodeColumnDesc) nd; PcrExprProcCtx epc = (PcrExprProcCtx) procCtx; @@ -251,7 +252,7 @@ public static ExprNodeGenericFuncDesc getOutExpr( */ public static class GenericFuncExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { PcrExprProcCtx ctx = (PcrExprProcCtx) procCtx; ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd; @@ -499,7 +500,7 @@ private Object handleUdfNot(PcrExprProcCtx ctx, ExprNodeGenericFuncDesc fd, public static class FieldExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprNodeFieldDesc fnd = (ExprNodeFieldDesc) nd; boolean unknown = false; @@ -527,7 +528,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class DefaultExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { if (nd instanceof ExprNodeConstantDesc) { return new NodeInfoWrapper(WalkState.CONSTANT, null, diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java index 65505b3..df36949 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java @@ -19,7 +19,8 @@ package org.apache.hadoop.hive.ql.optimizer.pcr; import java.util.ArrayList; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,7 +60,7 @@ public static class FilterPCR implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { PcrOpWalkerCtx owc = (PcrOpWalkerCtx) procCtx; FilterOperator fop = (FilterOperator) nd; @@ -161,7 +162,7 @@ else if (wrapper.state != PcrExprProcFactory.WalkState.FALSE) { public static class DefaultPCR implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Nothing needs to be done. return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java index 81b527c..3660732 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java @@ -22,7 +22,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.Path; @@ -155,7 +156,7 @@ public long getTotalKnownInputSize(Context context, MapWork currWork, } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { if (nodeOutputs == null || nodeOutputs.length == 0) { throw new SemanticException("No Dispatch Context"); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java index ee67443..23ba76e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java @@ -24,7 +24,8 @@ import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +67,7 @@ public AnnotateRunTimeStatsDispatcher(PhysicalContext context, Map stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... 
nodeOutputs) throws SemanticException { Task currTask = (Task) nd; Set> ops = new HashSet<>(); @@ -159,10 +160,10 @@ public void resolve(Set> opSet, ParseContext pctx) throws SemanticEx private Set> getAllOperatorsForSimpleFetch(Set> opSet) { Set> returnSet = new LinkedHashSet>(); - Stack> opStack = new Stack>(); + ArrayDeque> opStack = new ArrayDeque>(); // add all children opStack.addAll(opSet); - while (!opStack.empty()) { + while (!opStack.isEmpty()) { Operator op = opStack.pop(); returnSet.add(op); if (op.getChildOperators() != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java index 391cfda..5b8f32d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java @@ -23,7 +23,8 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -58,8 +59,8 @@ public static class DefaultInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, - Object... nodeOutputs) throws SemanticException { + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { return null; } @@ -112,7 +113,7 @@ private static void processForward(Operator op, Bucketin * @return Operator The parent operator in the current path. */ @SuppressWarnings("unchecked") - protected static Operator getParent(Stack stack) { + protected static Operator getParent(Deque stack) { return (Operator)Utils.getNthAncestor(stack, 1); } @@ -129,7 +130,7 @@ private static void processForward(Operator op, Bucketin */ public static class JoinInferrer extends DefaultInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { BucketingSortingCtx bctx = (BucketingSortingCtx)procCtx; @@ -325,7 +326,7 @@ private static void findBucketingSortingColumns(List exprs, */ public static class SelectInferrer extends DefaultInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { BucketingSortingCtx bctx = (BucketingSortingCtx)procCtx; @@ -455,7 +456,7 @@ private static int indexOfColName(List bucketSortCols, */ public static class FileSinkInferrer extends DefaultInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
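The getAllOperatorsForSimpleFetch hunk above is one of the few places in this stretch where a Stack was used directly rather than through a processor signature: the Deque interface has no empty() method, so the loop condition becomes isEmpty(). A self-contained sketch of the same traversal, with a hypothetical Op class standing in for Hive's Operator hierarchy:

import java.util.ArrayDeque;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class FetchWalkSketch {
  // Hypothetical stand-in for Hive's Operator<? extends OperatorDesc>.
  static class Op {
    List<Op> children;
  }

  static Set<Op> allOperators(Set<Op> roots) {
    Set<Op> returnSet = new LinkedHashSet<>();
    ArrayDeque<Op> opStack = new ArrayDeque<>(roots); // was new Stack<>() plus addAll
    while (!opStack.isEmpty()) {                      // Deque has isEmpty(), not empty()
      Op op = opStack.pop();
      returnSet.add(op);
      if (op.children != null) {
        opStack.addAll(op.children);                  // addAll appends at the tail
      }
    }
    return returnSet;
  }
}

Pop order shifts slightly relative to the Stack version, because addAll lands at the opposite end of an ArrayDeque used as a stack; the routine only accumulates a set of operators, so the result is unaffected.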
nodeOutputs) throws SemanticException { BucketingSortingCtx bctx = (BucketingSortingCtx)procCtx; @@ -573,7 +574,7 @@ static void extractTraits(BucketingSortingCtx bctx, ReduceSinkOperator rop, Oper public static class MultiGroupByInferrer extends GroupByInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { BucketingSortingCtx bctx = (BucketingSortingCtx)procCtx; GroupByOperator gop = (GroupByOperator)nd; @@ -615,7 +616,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, */ public static class GroupByInferrer extends DefaultInferrer implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { BucketingSortingCtx bctx = (BucketingSortingCtx)procCtx; @@ -712,7 +713,7 @@ protected Object processGroupBy(Operator parent, GroupBy public static class ForwardingInferrer extends DefaultInferrer implements NodeProcessor { @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { processForward((Operator)nd, (BucketingSortingCtx)procCtx, diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java index 9ad33fd..09e1ecd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java @@ -26,7 +26,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -102,7 +103,7 @@ public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") Task currTask = (Task) nd; @@ -264,7 +265,7 @@ private void checkForCrossProduct(String taskName, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") AbstractMapJoinOperator mjOp = (AbstractMapJoinOperator) nd; @@ -340,7 +341,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ReduceSinkOperator rsOp = (ReduceSinkOperator) nd; ReduceSinkDesc rsDesc = rsOp.getConf(); @@ -361,7 +362,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, static class NoopProcessor implements NodeProcessor { @Override - public final Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public final Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
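BucketingSortingOpProcFactory's getParent helper, a few hunks up, now takes a Deque and still delegates to Utils.getNthAncestor. The real org.apache.hadoop.hive.ql.lib.Utils is not part of this diff; one plausible shape for such a helper, relying on the fact that iterator() on an ArrayDeque used as a stack runs top to bottom:

import java.util.Deque;
import java.util.Iterator;

final class AncestorSketch {
  private AncestorSketch() {
  }

  // Hypothetical equivalent of Utils.getNthAncestor(stack, n). The walker's
  // current node sits on top of the stack, so its n-th ancestor lies n
  // positions further along a top-to-bottom iteration. Assumes the stack
  // holds at least n + 1 nodes, as the walkers guarantee for valid calls.
  static <T> T getNthAncestor(Deque<T> stack, int n) {
    Iterator<T> it = stack.iterator(); // head == top under push()/pop() usage
    T node = it.next();                // the current node itself
    for (int i = 0; i < n; i++) {
      node = it.next();
    }
    return node;
  }
}

With n = 1 this matches the old stack.get(stack.size() - 2) idiom seen elsewhere in the patch: the operator directly beneath the current one.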
nodeOutputs) throws SemanticException { return nd; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java index a694cf8..3f1afde 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java @@ -28,14 +28,14 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Deque; import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import com.google.common.base.Preconditions; import org.apache.hadoop.hive.conf.HiveConf; @@ -126,7 +126,7 @@ public LlapDecisionDispatcher(PhysicalContext pctx, LlapMode mode) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") Task currTask = (Task) nd; @@ -332,7 +332,7 @@ private boolean checkAggregators(Collection aggs) { opRules.put(new RuleRegExp("No scripts", ScriptOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, + public Object process(Node n, Deque s, NodeProcessorCtx c, Object... os) { LOG.debug("Cannot run operator [" + n + "] in llap mode."); return new Boolean(false); @@ -341,7 +341,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, opRules.put(new RuleRegExp("No user code in fil", FilterOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, + public Object process(Node n, Deque s, NodeProcessorCtx c, Object... os) { ExprNodeDesc expr = ((FilterOperator)n).getConf().getPredicate(); Boolean retval = new Boolean(checkExpression(expr)); @@ -354,7 +354,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, opRules.put(new RuleRegExp("No user code in gby", GroupByOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, + public Object process(Node n, Deque s, NodeProcessorCtx c, Object... os) { @SuppressWarnings("unchecked") List aggs = ((Operator) n).getConf().getAggregators(); @@ -368,7 +368,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, opRules.put(new RuleRegExp("No user code in select", SelectOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, + public Object process(Node n, Deque s, NodeProcessorCtx c, Object... os) { @SuppressWarnings({ "unchecked" }) List exprs = ((Operator) n).getConf().getColList(); @@ -385,7 +385,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, new RuleRegExp("Disable grace hash join if LLAP mode and not dynamic partition hash join", MapJoinOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { + public Object process(Node n, Deque s, NodeProcessorCtx c, Object... 
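LlapDecider registers its checks as anonymous NodeProcessor instances in a rule map, and each of the hunks above is again just the Stack-to-Deque parameter swap. A condensed sketch of that registration pattern, with string keys and a local functional interface standing in for RuleRegExp and Hive's NodeProcessor:

import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;

class LlapRuleSketch {
  interface Node {
    String getName();
  }

  // Stand-in for Hive's NodeProcessor, reduced to the parameters used here.
  interface Proc {
    Object process(Node n, Deque<Node> s, Object... os);
  }

  static Map<String, Proc> buildRules() {
    Map<String, Proc> opRules = new LinkedHashMap<>();
    // "No scripts": any script operator match vetoes LLAP mode outright.
    opRules.put("No scripts", (n, s, os) -> Boolean.FALSE);
    // "No user code in fil": the real rule inspects the filter predicate;
    // unconditional acceptance here is only a placeholder.
    opRules.put("No user code in fil", (n, s, os) -> Boolean.TRUE);
    return opRules;
  }
}

As an aside, Boolean.FALSE and Boolean.valueOf(...) would avoid the boxing constructor (new Boolean(...)) that these hunks retain unchanged.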
os) { MapJoinOperator mapJoinOp = (MapJoinOperator) n; if (mapJoinOp.getConf().isHybridHashJoin() && !(mapJoinOp.getConf().isDynamicPartitionHashJoin())) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java index 9ca815c..4bd3939 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java @@ -22,7 +22,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,7 +76,7 @@ public static NodeProcessor getGroupByProc() { public static NodeProcessor getDefaultProc() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } @@ -87,7 +88,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, * */ public static class MapJoinFollowedByGroupByProcessor implements NodeProcessor { - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { LocalMapJoinProcCtx context = (LocalMapJoinProcCtx) ctx; if (!nd.getName().equals("GBY")) { @@ -107,7 +108,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object.. * */ public static class LocalMapJoinProcessor implements NodeProcessor { - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { LocalMapJoinProcCtx context = (LocalMapJoinProcCtx) ctx; if (!nd.getName().equals("MAPJOIN")) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java index c0a72b6..7cb3217 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java @@ -26,7 +26,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.Context; @@ -226,7 +227,7 @@ private void processCurrentTask(Task currTask, } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... 
nodeOutputs) throws SemanticException { Task currTask = (Task) nd; // not map reduce task or not conditional task, just skip diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java index 3a20cfe..7749a10 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java @@ -29,7 +29,8 @@ import java.util.List; import java.util.Set; import java.util.SortedSet; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.TreeSet; import org.slf4j.Logger; @@ -89,7 +90,7 @@ public MemoryCalculator(PhysicalContext pctx) { @SuppressWarnings("unchecked") @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { Task currTask = (Task) nd; if (currTask instanceof StatsTask) { @@ -140,7 +141,7 @@ private void evaluateOperators(BaseWork w, PhysicalContext pctx) throws Semantic rules.put(new RuleRegExp("Map join memory estimator", MapJoinOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) { mapJoins.add((MapJoinOperator) nd); return null; @@ -277,7 +278,7 @@ private long computeInputSize(MapJoinOperator mj) throws HiveException { public class DefaultRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java index 5758282..911f187 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java @@ -22,7 +22,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -110,7 +111,7 @@ public TableScanProcessor() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TableScanOperator tsOp = (TableScanOperator) nd; WalkerCtx walkerCtx = (WalkerCtx) procCtx; @@ -129,7 +130,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, static private class FileSinkProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { WalkerCtx walkerCtx = (WalkerCtx) procCtx; // There can be atmost one element eligible to be converted to diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java index 49b839a..de8afd5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java @@ -21,7 +21,8 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,7 @@ public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException { static private class WhereFalseProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filter = (FilterOperator) nd; @@ -117,7 +118,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, static private class Limit0Processor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { if(!(((LimitOperator)nd).getConf().getLimit() == 0)) { @@ -133,7 +134,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, static private class TSMarker implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ((WalkerCtx)procCtx).setMayBeMetadataOnly((TableScanOperator)nd); return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java index 0882ae2..7ac725f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java @@ -37,7 +37,8 @@ import java.util.Map; import java.util.ServiceLoader; import java.util.Map.Entry; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -167,7 +168,7 @@ private String encode(Map partSpec) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... 
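The task-level resolvers in this stretch (MetadataOnlyOptimizer, NullScanOptimizer, NullScanTaskDispatcher) make the matching change on the Dispatcher side: dispatch() now receives the path as a Deque as well. A minimal sketch with stand-in types; the real interface also throws SemanticException:

import java.util.Deque;

class DispatchSketch {
  interface Node {
    String getName();
  }

  // Stand-in for Hive's Dispatcher, reduced to the parameters used here.
  interface Dispatcher {
    Object dispatch(Node nd, Deque<Node> stack, Object... nodeOutputs);
  }

  // The path still iterates top-first, so diagnostics that used to index the
  // Stack can simply stream it:
  static final Dispatcher PATH_LOGGER = (nd, stack, outs) -> {
    StringBuilder path = new StringBuilder("path to " + nd.getName() + ':');
    for (Node n : stack) {
      path.append(' ').append(n.getName());
    }
    return path.toString();
  };
}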
nodeOutputs) throws SemanticException { Task task = (Task) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java index dc433fe..894f1de 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java @@ -24,7 +24,8 @@ import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; import org.apache.hadoop.hive.ql.exec.StatsTask; @@ -68,7 +69,7 @@ public Serializer(PhysicalContext pctx) { @SuppressWarnings("unchecked") @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { Task currTask = (Task) nd; if (currTask instanceof StatsTask) { @@ -119,7 +120,7 @@ private void evaluateOperators(BaseWork w, PhysicalContext pctx) throws Semantic rules.put(new RuleRegExp("TS finder", TableScanOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) { tableScans.add((TableScanOperator) nd); return null; @@ -158,7 +159,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public class DefaultRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java index 58e373e..84b8bb0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java @@ -19,7 +19,8 @@ package org.apache.hadoop.hive.ql.optimizer.physical; import java.io.Serializable; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.Task; @@ -48,7 +49,7 @@ public static NodeProcessor getJoinProc() { * */ public static class SkewJoinJoinProcessor implements NodeProcessor { - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { SkewJoinProcCtx context = (SkewJoinProcCtx) ctx; JoinOperator op = (JoinOperator) nd; @@ -67,7 +68,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, * */ public static class SkewJoinDefaultProcessor implements NodeProcessor { - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java index f48d118..de86c6f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java @@ -22,7 +22,8 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.ConditionalTask; @@ -69,7 +70,7 @@ public SkewJoinTaskDispatcher(PhysicalContext context) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { Task task = (Task) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java index 92d2191..f2eb9bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java @@ -40,7 +40,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; /** * Check each MapJoin and ShuffleJoin Operator to see if they are performing a cross product. @@ -56,7 +57,7 @@ public class SparkCrossProductCheck implements PhysicalPlanResolver, Dispatcher { @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") Task currTask = (Task) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java index a3ec990..e7a6e2d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java @@ -27,7 +27,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -94,10 +95,10 @@ private boolean containsOp(SparkWork sparkWork, Class clazz) { Set> ops = new HashSet>(); if (work instanceof MapWork) { Collection> opSet = ((MapWork) work).getAliasToWork().values(); - Stack> opStack = new Stack>(); + ArrayDeque> opStack = new ArrayDeque>(); opStack.addAll(opSet); - while (!opStack.empty()) { + while (!opStack.isEmpty()) { Operator op = opStack.pop(); ops.add(op); if (op.getChildOperators() != null) { @@ -296,7 +297,7 @@ private SparkTask createSparkTask(SparkTask originalTask, } @Override - public Object dispatch(Node nd, Stack stack, Object... nos) + public Object dispatch(Node nd, Deque stack, Object... 
nos) throws SemanticException { Task currentTask = (Task) nd; if(currentTask.isMapRedTask()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index fadbc20..8f89842 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -26,19 +26,22 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.regex.Pattern; import org.apache.commons.lang.ArrayUtils; import org.apache.calcite.util.Pair; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.hadoop.hive.ql.lib.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; @@ -84,18 +87,6 @@ import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; -import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; -import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; -import org.apache.hadoop.hive.ql.lib.Dispatcher; -import org.apache.hadoop.hive.ql.lib.GraphWalker; -import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.PreOrderOnceWalker; -import org.apache.hadoop.hive.ql.lib.PreOrderWalker; -import org.apache.hadoop.hive.ql.lib.Rule; -import org.apache.hadoop.hive.ql.lib.RuleRegExp; -import org.apache.hadoop.hive.ql.lib.TaskGraphWalker; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -543,7 +534,7 @@ public VectorizationDispatcher(PhysicalContext physicalContext) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { Task currTask = (Task) nd; if (currTask instanceof MapRedTask) { @@ -1284,7 +1275,7 @@ public ClearVectorDescsNodeProcessor() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { for (Node n : stack) { Operator op = (Operator) n; @@ -1341,7 +1332,7 @@ public MapWorkValidationNodeProcessor(MapWork mapWork, boolean isTezOrSpark) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { for (Node n : stack) { Operator op = (Operator) n; @@ -1384,7 +1375,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { for (Node n : stack) { Operator op = (Operator) n; @@ -1433,7 +1424,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, protected final Map, Operator> opToVectorOpMap = new HashMap, Operator>(); - public VectorizationContext walkStackToFindVectorizationContext(Stack stack, + public VectorizationContext walkStackToFindVectorizationContext(Deque stack, Operator op) throws SemanticException { VectorizationContext vContext = null; if (stack.size() <= 1) { @@ -1448,7 +1439,7 @@ public VectorizationContext walkStackToFindVectorizationContext(Stack stac if (i < 0) { return null; } - Operator opParent = (Operator) stack.get(i); + Operator opParent = (Operator) Utils.get(stack, i); Operator vectorOpParent = opToVectorOpMap.get(opParent); if (vectorOpParent != null) { if (vectorOpParent instanceof VectorizationContextRegion) { @@ -1461,7 +1452,6 @@ public VectorizationContext walkStackToFindVectorizationContext(Stack stac } else { LOG.info("walkStackToFindVectorizationContext " + opParent.getName() + " is not vectorized"); } - --i; } return vContext; } @@ -1485,7 +1475,7 @@ public VectorizationContext walkStackToFindVectorizationContext(Stack stac } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { throw new SemanticException("Must be overridden"); } @@ -1506,7 +1496,7 @@ public MapWorkVectorizationNodeProcessor(MapWork mWork, boolean isTezOrSpark, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { Operator op = (Operator) nd; @@ -1570,7 +1560,7 @@ public ReduceWorkVectorizationNodeProcessor(VectorTaskColumnInfo vectorTaskColum } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { Operator op = (Operator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java index 81e99fc..5af9949 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,7 +75,7 @@ public IndexWhereProcessor(Map> tsToIndices) { /** * Process a node of the operator tree. This matches on the rule in IndexWhereTaskDispatcher */ - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
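The Vectorizer's walkStackToFindVectorizationContext, in the hunks above, swaps stack.get(i) for Utils.get(stack, i) while walking from the operator beneath the current one toward the root. Since positional lookups on a Deque are linear anyway, the same walk can be expressed with a single iterator; a simplified sketch under stand-in types (Op for Operator, VCtx for VectorizationContext), omitting the region-specific logic of the real method:

import java.util.Deque;
import java.util.Iterator;
import java.util.Map;

class VContextWalkSketch {
  static class Op {
  }

  static class VCtx {
  }

  // Returns the nearest ancestor's vectorization context, mirroring the loop
  // above, including its stack.size() <= 1 early-out.
  static VCtx findContext(Deque<Op> stack, Map<Op, VCtx> opToCtx) {
    if (stack.size() <= 1) {
      return null;
    }
    Iterator<Op> it = stack.iterator(); // top-to-bottom under stack usage
    it.next();                          // skip the current operator
    while (it.hasNext()) {
      VCtx ctx = opToCtx.get(it.next());
      if (ctx != null) {
        return ctx;
      }
    }
    return null;
  }
}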
nodeOutputs) throws SemanticException { TableScanOperator operator = (TableScanOperator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java index ae96def..88eb01b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java @@ -25,7 +25,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.ql.exec.Operator; @@ -67,7 +68,7 @@ public IndexWhereTaskDispatcher(PhysicalContext context) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { Task task = (Task) nd; @@ -145,7 +146,7 @@ public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) private NodeProcessor getDefaultProcessor() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java index ec192a0..e860359 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import com.google.common.collect.Maps; import com.google.common.collect.Sets; @@ -77,7 +78,7 @@ public int compare(BaseWork o1, BaseWork o2) { }; @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) throws SemanticException { + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { if (nd instanceof SparkTask) { SparkTask sparkTask = (SparkTask) nd; SparkWork sparkWork = sparkTask.getWork(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java index 337f418..0367bdd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.optimizer.spark; +import java.util.Deque; import java.util.List; import java.util.Set; import java.util.Stack; @@ -68,7 +69,7 @@ public SetSparkReducerParallelism(HiveConf conf) { } @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... 
nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinHintOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinHintOptimizer.java index e53c440..3ec5b9f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinHintOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinHintOptimizer.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer.spark; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lib.Node; @@ -50,7 +51,7 @@ public SparkJoinHintOptimizer(ParseContext parseCtx) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { OptimizeSparkProcContext context = (OptimizeSparkProcContext) procCtx; HiveConf hiveConf = context.getParseContext().getConf(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinOptimizer.java index d57ceff..1cd190a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkJoinOptimizer.java @@ -26,7 +26,8 @@ import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.spark.OptimizeSparkProcContext; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; /** * Converts a join to a more optimized join for the Spark path. @@ -43,7 +44,7 @@ public SparkJoinOptimizer(ParseContext procCtx) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { OptimizeSparkProcContext context = (OptimizeSparkProcContext) procCtx; HiveConf conf = context.getConf(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java index d8f37ae..0b494e7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java @@ -22,7 +22,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.OperatorUtils; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -69,7 +70,7 @@ * might as well do it here. */ public Object - process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) + process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { OptimizeSparkProcContext context = (OptimizeSparkProcContext) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java index f296a53..352079a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java @@ -20,10 +20,12 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -43,6 +45,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.spark.GenSparkProcContext; import org.apache.hadoop.hive.ql.plan.BaseWork; @@ -68,7 +71,7 @@ private boolean hasGroupBy = false; @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procCtx; hasGroupBy = true; @@ -109,7 +112,7 @@ private boolean hasGroupBy(Operator mapjoinOp, */ @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... 
nodeOutputs) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procContext; @@ -119,8 +122,8 @@ public Object process(Node nd, Stack stack, } MapJoinOperator mapJoinOp = (MapJoinOperator)nd; - - if (stack.size() < 2 || !(stack.get(stack.size() - 2) instanceof ReduceSinkOperator)) { + Node node = Utils.get(stack, stack.size() - 2); + if (stack.size() < 2 || !(node instanceof ReduceSinkOperator)) { context.currentMapJoinOperators.add(mapJoinOp); return null; } @@ -128,7 +131,7 @@ public Object process(Node nd, Stack stack, context.preceedingWork = null; context.currentRootOperator = null; - ReduceSinkOperator parentRS = (ReduceSinkOperator)stack.get(stack.size() - 2); + ReduceSinkOperator parentRS = (ReduceSinkOperator)node; // remove the tag for in-memory side of mapjoin parentRS.getConf().setSkipTag(true); parentRS.setSkipTag(true); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSMBJoinHintOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSMBJoinHintOptimizer.java index f455748..cc14fab 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSMBJoinHintOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSMBJoinHintOptimizer.java @@ -19,7 +19,8 @@ package org.apache.hadoop.hive.ql.optimizer.spark; import java.util.List; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -52,7 +53,7 @@ public SparkSMBJoinHintOptimizer() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { MapJoinOperator mapJoinOp = (MapJoinOperator) nd; SortBucketJoinProcCtx smbJoinContext = (SortBucketJoinProcCtx) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java index abc9fcf..e0747cc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java @@ -55,7 +55,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; /** * Spark-version of SkewJoinProcFactory. @@ -79,7 +80,7 @@ public static NodeProcessor getJoinProc() { public static class SparkSkewJoinJoinProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
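SparkReduceSinkMapJoinProc, just above, is representative of the call-site pattern: stack.get(stack.size() - 2) becomes Utils.get(stack, stack.size() - 2), since Deque exposes no indexed access. The real helper lives in org.apache.hadoop.hive.ql.lib.Utils and is outside this diff; a plausible sketch follows. One detail worth noting in that hunk: the lookup is now evaluated before the stack.size() < 2 guard, so a helper reached this way has to tolerate out-of-range indices rather than throw.

import java.util.Deque;
import java.util.Iterator;

final class DequeUtilsSketch {
  private DequeUtilsSketch() {
  }

  // Hypothetical equivalent of Utils.get(stack, index). Stack.get(i) indexed
  // from the bottom (Vector order); descendingIterator() on an ArrayDeque
  // used as a stack replays exactly that bottom-to-top order. Out-of-range
  // indices yield null because call sites like the one above may probe
  // before their size checks run.
  static <T> T get(Deque<T> stack, int index) {
    if (index < 0 || index >= stack.size()) {
      return null;
    }
    Iterator<T> it = stack.descendingIterator();
    for (int i = 0; i < index; i++) {
      it.next();
    }
    return it.next();
  }
}

get(stack, stack.size() - 2) then behaves like the old stack.get(stack.size() - 2): it returns the operator directly beneath the one on top.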
nodeOutputs) throws SemanticException { SparkSkewJoinResolver.SparkSkewJoinProcCtx context = (SparkSkewJoinResolver.SparkSkewJoinProcCtx) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java index 608a0de..6f9efb9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java @@ -25,7 +25,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -73,7 +74,7 @@ public SparkSkewJoinTaskDispatcher(PhysicalContext context) { } @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSortMergeJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSortMergeJoinOptimizer.java index 845fbb5..66a78a1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSortMergeJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSortMergeJoinOptimizer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.optimizer.AbstractSMBJoinProc; import org.apache.hadoop.hive.ql.optimizer.SortBucketJoinProcCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; @@ -33,7 +34,8 @@ import org.apache.hadoop.hive.ql.parse.spark.OptimizeSparkProcContext; import org.apache.hadoop.hive.ql.plan.OperatorDesc; -import java.util.Stack; +import java.util.Deque; +import java.util.Iterator; /** * Converts a common join operator to an SMB join if eligible. Handles auto SMB conversion. @@ -48,7 +50,7 @@ public SparkSortMergeJoinOptimizer() { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { JoinOperator joinOp = (JoinOperator) nd; @@ -72,7 +74,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, protected boolean canConvertJoinToSMBJoin(JoinOperator joinOperator, SortBucketJoinProcCtx smbJoinContext, ParseContext pGraphContext, - Stack stack) throws SemanticException { + Deque stack) throws SemanticException { if (!supportBucketMapJoin(stack)) { return false; } @@ -81,10 +83,10 @@ protected boolean canConvertJoinToSMBJoin(JoinOperator joinOperator, //Preliminary checks. In the MR version of the code, these used to be done via another walk, //here it is done inline. - private boolean supportBucketMapJoin(Stack stack) { + private boolean supportBucketMapJoin(Deque stack) { int size = stack.size(); - if (!(stack.get(size - 1) instanceof JoinOperator) - || !(stack.get(size - 2) instanceof ReduceSinkOperator)) { + if (!(Utils.get(stack, size - 1) instanceof JoinOperator) + || !(Utils.get(stack, size - 2) instanceof ReduceSinkOperator)) { return false; } @@ -92,7 +94,7 @@ private boolean supportBucketMapJoin(Stack stack) { // not be converted. 
for (int pos = size - 3; pos >= 0; pos--) { @SuppressWarnings("unchecked") - Operator op = (Operator) stack.get(pos); + Operator op = (Operator) Utils.get(stack, pos); if (!op.supportAutomaticSortMergeJoin()) { return false; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index d9f70a7..3bc447e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -27,7 +27,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -118,7 +119,7 @@ public static class TableScanStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TableScanOperator tsop = (TableScanOperator) nd; AnnotateStatsProcCtx aspCtx = (AnnotateStatsProcCtx) procCtx; @@ -162,7 +163,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class SelectStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SelectOperator sop = (SelectOperator) nd; @@ -260,7 +261,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class FilterStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { AnnotateStatsProcCtx aspCtx = (AnnotateStatsProcCtx) procCtx; FilterOperator fop = (FilterOperator) nd; @@ -987,7 +988,7 @@ private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, public static class GroupByStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { GroupByOperator gop = (GroupByOperator) nd; @@ -1411,7 +1412,7 @@ private boolean checkMapSideAggregation(GroupByOperator gop, @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { long newNumRows = 0; CommonJoinOperator jop = (CommonJoinOperator) nd; @@ -2035,7 +2036,7 @@ private long getDenominator(List distinctVals) { public static class LimitStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { LimitOperator lop = (LimitOperator) nd; Operator parent = lop.getParentOperators().get(0); @@ -2100,7 +2101,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class ReduceSinkStatsRule extends DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ReduceSinkOperator rop = (ReduceSinkOperator) nd; Operator parent = rop.getParentOperators().get(0); @@ -2163,7 +2164,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class DefaultStatsRule implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { Operator op = (Operator) nd; OperatorDesc conf = op.getConf(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java index 2a7f3d4..3bed6a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java @@ -19,9 +19,11 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -32,6 +34,7 @@ import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext.UnionParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -47,12 +50,12 @@ private UnionProcFactory() { // prevent instantiation } - public static int getPositionParent(UnionOperator union, Stack stack) { + public static int getPositionParent(UnionOperator union, Deque stack) { int pos = 0; int size = stack.size(); - assert size >= 2 && stack.get(size - 1) == union; + assert size >= 2 && Utils.get(stack, size - 1) == union; Operator parent = - (Operator) stack.get(size - 2); + (Operator) Utils.get(stack, size - 2); List> parUnion = union .getParentOperators(); pos = parUnion.indexOf(parent); @@ -66,7 +69,7 @@ public static int getPositionParent(UnionOperator union, Stack stack) { public static class MapRedUnion implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { UnionOperator union = (UnionOperator) nd; UnionProcContext ctx = (UnionProcContext) procCtx; @@ -91,7 +94,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class MapUnion implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { UnionOperator union = (UnionOperator) nd; UnionProcContext ctx = (UnionProcContext) procCtx; @@ -116,7 +119,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class UnknownUnion implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { UnionOperator union = (UnionOperator) nd; UnionProcContext ctx = (UnionProcContext) procCtx; @@ -131,7 +134,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, UnionOperator parentUnionOperator = null; while (start >= 0) { Operator parent = - (Operator) stack.get(start); + (Operator) Utils.get(stack, start); if (parent instanceof UnionOperator) { parentUnionOperator = (UnionOperator) parent; break; @@ -174,7 +177,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class UnionNoProcessFile implements NodeProcessor { private void pushOperatorsAboveUnion(UnionOperator union, - Stack stack, int pos) throws SemanticException { + Deque stack, int pos) throws SemanticException { // Clone all the operators between union and filescan, and push them above // the union. Remove the union (the tree below union gets delinked after that) try { @@ -187,7 +190,7 @@ private void pushOperatorsAboveUnion(UnionOperator union, for (; pos < stack.size() - 1; pos++) { Operator originalOp = - (Operator)stack.get(pos); + (Operator)Utils.get(stack, pos); for (int p = 0; p < numParents; p++) { OperatorDesc cloneDesc = (OperatorDesc)originalOp.getConf().clone(); @@ -209,7 +212,7 @@ private void pushOperatorsAboveUnion(UnionOperator union, // possibly running in parallel. Those sub-queries cannot write to the same // directory. Clone the filesink, but create a sub-directory in the final path // for each sub-query. Also, these different filesinks need to be linked to each other - FileSinkOperator fileSinkOp = (FileSinkOperator)stack.get(pos); + FileSinkOperator fileSinkOp = (FileSinkOperator)Utils.get(stack, pos); // For file sink operator, change the directory name Path parentDirName = fileSinkOp.getConf().getDirName(); @@ -242,7 +245,7 @@ private void pushOperatorsAboveUnion(UnionOperator union, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FileSinkOperator fileSinkOp = (FileSinkOperator)nd; @@ -262,7 +265,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // this operator can be removed. while (pos >= 0) { Operator operator = - (Operator)stack.get(pos); + (Operator)Utils.get(stack, pos); // (1) Because we have operator.supportUnionRemoveOptimization() for // true only in SEL and FIL operators, @@ -308,7 +311,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class NoUnion implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java index 9442ba3..5c8bcc1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java @@ -20,7 +20,8 @@ import java.util.ArrayList; import java.util.List; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,7 +41,7 @@ static final private Logger LOG = LoggerFactory.getLogger(AppMasterEventProcessor.class.getName()); @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { GenTezProcContext context = (GenTezProcContext) procCtx; AppMasterEventOperator event = (AppMasterEventOperator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java index 191c82e..c1d75a6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.parse; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +38,7 @@ static final private Logger LOG = LoggerFactory.getLogger(FileSinkProcessor.class.getName()); @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java index 7f5fdff..a5fa054 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java @@ -615,16 +615,16 @@ public static void removeSemiJoinOperator(ParseContext context, private static class DynamicValuePredicateProc implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { DynamicValuePredicateContext ctx = (DynamicValuePredicateContext) procCtx; - ExprNodeDesc parent = (ExprNodeDesc) stack.get(stack.size() - 2); + ExprNodeDesc parent = (ExprNodeDesc) Utils.get(stack, stack.size() - 2); if (parent instanceof ExprNodeGenericFuncDesc) { ExprNodeGenericFuncDesc parentFunc = (ExprNodeGenericFuncDesc) parent; if (parentFunc.getGenericUDF() instanceof GenericUDFBetween || parentFunc.getGenericUDF() instanceof GenericUDFInBloomFilter) { ExprNodeDesc grandParent = stack.size() >= 3 ? 
- (ExprNodeDesc) stack.get(stack.size() - 3) : null; + (ExprNodeDesc) Utils.get(stack, stack.size() - 3) : null; ctx.childParentMapping.put(parentFunc, grandParent); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java index 97f3300..238988d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java @@ -19,11 +19,12 @@ package org.apache.hadoop.hive.ql.parse; import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Stack; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; @@ -69,7 +70,7 @@ public GenTezWork(GenTezUtils utils) { } @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... nodeOutputs) throws SemanticException { @@ -495,9 +496,17 @@ private int getFollowingWorkIndex(TezWork tezWork, UnionWork unionWork, ReduceSi @SuppressWarnings("unchecked") private Operator getParentFromStack(Node currentMergeJoinOperator, - Stack stack) { - int pos = stack.indexOf(currentMergeJoinOperator); - return (Operator) stack.get(pos - 1); + Deque stack) { + // stack.iterator() walks from the top of the stack (most recently pushed) + // down to the bottom, so the parent (the node pushed just before the merge + // join operator) is the element that follows it in iteration order. + Iterator iterator = stack.iterator(); + while (iterator.hasNext()) { + if (iterator.next() == currentMergeJoinOperator) { + return (Operator) iterator.next(); + } + } + return null; } private void connectUnionWorkWithWork(UnionWork unionWork, BaseWork work, TezWork tezWork, diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java index fe065f8..3299d85 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java @@ -25,7 +25,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.LinkedHashSet; import org.slf4j.Logger; @@ -100,7 +101,7 @@ private void analyzeCreateMacro(ASTNode ast) throws SemanticException { Node expression = (Node) ast.getChild(2); PreOrderWalker walker = new PreOrderWalker(new Dispatcher() { @Override - public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) + public Object dispatch(Node nd, Deque stack, Object... nodeOutputs) throws SemanticException { if(nd instanceof ASTNode) { ASTNode node = (ASTNode)nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java index eb6b83a..14857f9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java @@ -20,7 +20,8 @@ import java.io.PrintStream; import java.util.HashMap; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.Node; @@ -79,7 +80,7 @@ private String getChildren(Operator op) { return ret.toString(); } - public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, + public Object process(Node nd, Deque stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) throws SemanticException { Operator op = (Operator) nd; if (opMap.get(op) == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java index c13a404..89b861b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java @@ -20,7 +20,8 @@ import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,7 +66,7 @@ public ProcessAnalyzeTable(GenTezUtils utils) { @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java index ec52741..610b398 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java @@ -21,7 +21,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.Context; @@ -252,7 +253,7 @@ ColumnInfo getRightOuterColInfo() { boolean forHavingClause; String parentQueryNewAlias; NodeProcessor defaultExprProcessor; - Stack stack; + Deque stack; ConjunctAnalyzer(RowResolver parentQueryRR, boolean forHavingClause, @@ -261,7 +262,7 @@ ColumnInfo getRightOuterColInfo() { defaultExprProcessor = new DefaultExprProcessor(); this.forHavingClause = forHavingClause; this.parentQueryNewAlias = parentQueryNewAlias; - stack = new Stack(); + stack = new ArrayDeque(); } /* diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index f765d99..a121836 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2441,7 +2441,7 @@ private void replaceViewReferenceWithDefinition(QB qb, Table tab, viewTree = tree; Dispatcher nodeOriginDispatcher = new Dispatcher() { @Override - public Object dispatch(Node nd, java.util.Stack stack, + public Object dispatch(Node nd, java.util.Deque stack, Object... nodeOutputs) { ((ASTNode) nd).setOrigin(viewOrigin); return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java index a2042dc..ea4afca 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java @@ -21,7 +21,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -95,7 +96,7 @@ public TableAccessInfo analyzeTableAccess() throws SemanticException { private NodeProcessor getDefaultProc() { return new NodeProcessor() { @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { return null; } @@ -113,7 +114,7 @@ public GroupByProcessor(ParseContext pGraphContext) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) { GroupByOperator op = (GroupByOperator)nd; TableAccessCtx tableAccessCtx = (TableAccessCtx)procCtx; @@ -160,7 +161,7 @@ public JoinProcessor(ParseContext pGraphContext) { } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) { JoinOperator op = (JoinOperator)nd; TableAccessCtx tableAccessCtx = (TableAccessCtx)procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java index 47b229f..4e46fe1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java @@ -237,7 +237,7 @@ private void removeCycleOperator(Set> component, OptimizeTezProcCont AtomicInteger index = new AtomicInteger(); Map, Integer> indexes = new HashMap, Integer>(); Map, Integer> lowLinks = new HashMap, Integer>(); - Stack> nodes = new Stack>(); + ArrayDeque> nodes = new ArrayDeque>(); Set>> components = new LinkedHashSet>>(); for (Operator o : deque) { @@ -249,7 +249,7 @@ private void removeCycleOperator(Set> component, OptimizeTezProcCont return components; } - private void connect(Operator o, AtomicInteger index, Stack> nodes, + private void connect(Operator o, AtomicInteger index, ArrayDeque> nodes, Map, Integer> indexes, Map, Integer> lowLinks, Set>> components, ParseContext parseContext) { @@ -578,11 +578,11 @@ protected void optimizeTaskPlan(List> rootTasks, Pa private static class SMBJoinOpProc implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SMBJoinOpProcContext ctx = (SMBJoinOpProcContext) procCtx; ctx.JoinOpToTsOpMap.put((CommonMergeJoinOperator) nd, - (TableScanOperator) stack.get(0)); + (TableScanOperator) stack.peekLast()); return null; } } @@ -655,12 +655,12 @@ private static void removeSemijoinOptimizationFromSMBJoins( private static class SemiJoinCycleRemovalDueToMapsideJoins implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { SemiJoinCycleRemovalDueTOMapsideJoinContext ctx = (SemiJoinCycleRemovalDueTOMapsideJoinContext) procCtx; - ctx.childParentMap.put((Operator)stack.get(stack.size() - 2), (Operator) nd); + ctx.childParentMap.put((Operator)Utils.get(stack, stack.size() - 2), (Operator) nd); return null; } } @@ -750,7 +749,7 @@ private static void removeSemiJoinCyclesDueToMapsideJoins( private static class SemiJoinRemovalIfNoStatsProc implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { assert nd instanceof ReduceSinkOperator; ReduceSinkOperator rs = (ReduceSinkOperator) nd; @@ -763,7 +762,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // This is a semijoin branch. 
The stack should look like, // -SEL-GB1-RS1-GB2-RS2 - GroupByOperator gbOp = (GroupByOperator) (stack.get(stack.size() - 2)); + GroupByOperator gbOp = (GroupByOperator) (Utils.get(stack, stack.size() - 2)); GroupByDesc gbDesc = gbOp.getConf(); ArrayList aggregationDescs = gbDesc.getAggregators(); boolean removeSemiJoin = false; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index f979c14..7857edb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -25,10 +25,12 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.calcite.rel.RelNode; import org.apache.commons.lang.StringUtils; @@ -53,6 +55,7 @@ import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.lib.ExpressionWalker; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; @@ -251,7 +254,7 @@ public static ExprNodeDesc processGByExpr(Node nd, Object procCtx) public static class NullExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -284,7 +287,7 @@ public NullExprProcessor getNullExprProcessor() { public static class NumExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -374,7 +377,7 @@ public NumExprProcessor getNumExprProcessor() { public static class StrExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -432,7 +435,7 @@ public StrExprProcessor getStrExprProcessor() { public static class BoolExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -478,7 +481,7 @@ public BoolExprProcessor getBoolExprProcessor() { public static class DateTimeExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -521,7 +524,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(DateUtils.NANOS_PER_SEC); @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -602,7 +605,7 @@ public DateTimeExprProcessor getDateTimeExprProcessor() { public static class ColumnExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -616,7 +619,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } ASTNode expr = (ASTNode) nd; - ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null; + ASTNode parent = stack.size() > 1 ? (ASTNode) Utils.get(stack, stack.size() - 2) : null; RowResolver input = ctx.getInputRR(); if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) { @@ -656,9 +659,9 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // It's not a column or a table alias. if (input.getIsExprResolver()) { ASTNode exprNode = expr; - if (!stack.empty()) { + if (!stack.isEmpty()) { ASTNode tmp = (ASTNode) stack.pop(); - if (!stack.empty()) { + if (!stack.isEmpty()) { exprNode = (ASTNode) stack.peek(); } stack.push(tmp); @@ -1192,7 +1195,7 @@ protected ExprNodeDesc processQualifiedColRef(TypeCheckCtx ctx, ASTNode expr, } @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; @@ -1389,7 +1392,7 @@ public DefaultExprProcessor getDefaultExprProcessor() { public static class SubQueryExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { TypeCheckCtx ctx = (TypeCheckCtx) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java index 1ee4ea9..5aa5303 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.parse; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +38,7 @@ static final private Logger LOG = LoggerFactory.getLogger(UnionProcessor.class.getName()); @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { GenTezProcContext context = (GenTezProcContext) procCtx; UnionOperator union = (UnionOperator) nd; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java index afbeccb..9d5ab4d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java @@ -23,7 +23,8 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,7 +73,7 @@ public GenSparkWork(GenSparkUtils utils) { } @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... nodeOutputs) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procContext; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java index 682b987..33f584c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java @@ -25,7 +25,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hive.conf.HiveConf; @@ -185,7 +186,7 @@ private void removeDPPOperator(Set> component, OptimizeSparkProcCont AtomicInteger index = new AtomicInteger(); Map, Integer> indexes = new HashMap, Integer>(); Map, Integer> lowLinks = new HashMap, Integer>(); - Stack> nodes = new Stack>(); + ArrayDeque> nodes = new ArrayDeque>(); Set>> components = new HashSet>>(); for (Operator o : procCtx.getParseContext().getTopOps().values()) { @@ -196,7 +197,7 @@ private void removeDPPOperator(Set> component, OptimizeSparkProcCont return components; } - private void connect(Operator o, AtomicInteger index, Stack> nodes, + private void connect(Operator o, AtomicInteger index, ArrayDeque> nodes, Map, Integer> indexes, Map, Integer> lowLinks, Set>> components) { @@ -416,7 +417,7 @@ private void generateTaskTreeHelper(GenSparkProcContext procCtx, List topN opRules.put(new RuleRegExp("Remember union", UnionOperator.getOperatorName() + "%"), new NodeProcessor() { @Override - public Object process(Node n, Stack s, + public Object process(Node n, Deque s, NodeProcessorCtx procCtx, Object... os) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procCtx; UnionOperator union = (UnionOperator) n; @@ -445,7 +446,7 @@ public Object process(Node n, Stack s, opRules.put(new TypeRule(SMBMapJoinOperator.class), new NodeProcessor() { @Override - public Object process(Node currNode, Stack stack, + public Object process(Node currNode, Deque stack, NodeProcessorCtx procCtx, Object... 
os) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procCtx; SMBMapJoinOperator currSmbNode = (SMBMapJoinOperator) currNode; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java index 4cc127a..ffc409a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.parse.spark; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +43,7 @@ * we might as well do it here. */ @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java index 52186b4..b9e8090 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java @@ -20,7 +20,8 @@ import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,7 +72,7 @@ public SparkProcessAnalyzeTable(GenSparkUtils utils) { @SuppressWarnings("unchecked") @Override - public Object process(Node nd, Stack stack, + public Object process(Node nd, Deque stack, NodeProcessorCtx procContext, Object... nodeOutputs) throws SemanticException { GenSparkProcContext context = (GenSparkProcContext) procContext; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java index d4f58be..0db3b3c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java @@ -22,7 +22,8 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -72,7 +73,7 @@ */ public class SplitOpTreeForDPP implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { SparkPartitionPruningSinkOperator pruningSinkOp = (SparkPartitionPruningSinkOperator) nd; GenSparkProcContext context = (GenSparkProcContext) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java index d4df1e8..8b4318f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java @@ -21,7 +21,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -67,7 +68,7 @@ * Converts the reference from child row resolver to current row resolver. */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx; ExprNodeColumnDesc colref = (ExprNodeColumnDesc) nd; @@ -142,7 +143,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class FieldExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx; String alias = null; @@ -192,7 +193,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class GenericFuncExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx; String alias = null; @@ -256,7 +257,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class DefaultExprProcessor implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx; ExprInfo exprInfo = ctx.addOrGetExprInfo((ExprNodeDesc) nd); diff --git ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java index ed88725..cffa747 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java @@ -26,7 +26,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -157,7 +158,7 @@ private static void removeOperator(Operator operator) { public static class ScriptPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { LOG.debug("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -188,10 +189,10 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, * condition can be pushed down as a limit pushdown(mapGroupBy=true) * * (non-Javadoc) - * @see org.apache.hadoop.hive.ql.ppd.OpProcFactory.ScriptPPD#process(org.apache.hadoop.hive.ql.lib.Node, java.util.Stack, org.apache.hadoop.hive.ql.lib.NodeProcessorCtx, java.lang.Object[]) + * @see org.apache.hadoop.hive.ql.ppd.OpProcFactory.ScriptPPD#process(org.apache.hadoop.hive.ql.lib.Node, java.util.Deque, org.apache.hadoop.hive.ql.lib.NodeProcessorCtx, java.lang.Object[]) */ @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -364,7 +365,7 @@ private void pushRankLimitToRedSink(PTFOperator ptfOp, HiveConf conf, int rLimit public static class UDTFPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { super.process(nd, stack, procCtx, nodeOutputs); OpWalkerInfo owi = (OpWalkerInfo) procCtx; @@ -383,7 +384,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class LateralViewForwardPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -410,7 +411,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class TableScanPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -438,12 +439,12 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class FilterPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { return process(nd, stack, procCtx, false, nodeOutputs); } - Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, boolean onlySyntheticJoinPredicate, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -496,7 +497,7 @@ Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, public static class SimpleFilterPPD extends FilterPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; // We try to push the full Filter predicate iff: @@ -532,7 +533,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, */ public static class JoinerPPD extends DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); @@ -651,7 +652,7 @@ protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds, OpWalkerIn } public static class ReduceSinkPPD extends DefaultPPD implements NodeProcessor { - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { super.process(nd, stack, procCtx, nodeOutputs); Operator operator = (Operator) nd; @@ -736,7 +737,7 @@ private void applyFilterTransitivity(JoinOperator join, int targetPos, OpWalkerI public static class DefaultPPD implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); diff --git ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java index 8066292..7f7f400 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java @@ -21,11 +21,13 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -43,6 +45,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.optimizer.Transform; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -136,12 +139,12 @@ public TransitiveContext() { private static class JoinTransitive implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { @SuppressWarnings("unchecked") CommonJoinOperator join = (CommonJoinOperator) nd; - ReduceSinkOperator source = (ReduceSinkOperator) stack.get(stack.size() - 2); - FilterOperator filter = (FilterOperator) stack.get(stack.size() - 3); + ReduceSinkOperator source = (ReduceSinkOperator) Utils.get(stack, stack.size() - 2); + FilterOperator filter = (FilterOperator) Utils.get(stack, stack.size() - 3); int srcPos = join.getParentOperators().indexOf(source); TransitiveContext context = (TransitiveContext) procCtx; diff --git ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java index 71c7310..72c20bc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java @@ -20,12 +20,15 @@ import java.util.ArrayList; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; +import org.apache.hadoop.hive.ql.lib.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -131,7 +134,7 @@ public ParseContext getParseContext() { private static class JoinSynthetic implements NodeProcessor { @Override - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ParseContext pCtx = ((SyntheticContext) procCtx).getParseContext(); @@ -139,7 +142,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, @SuppressWarnings("unchecked") CommonJoinOperator join = (CommonJoinOperator) nd; - ReduceSinkOperator source = (ReduceSinkOperator) stack.get(stack.size() - 2); + ReduceSinkOperator source = (ReduceSinkOperator) Utils.get(stack, stack.size() - 2); int srcPos = join.getParentOperators().indexOf(source); List> parents = join.getParentOperators(); diff --git ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java index aca8354..4631006 100644 --- ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java @@ -24,7 +24,8 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.TreeSet; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; @@ -78,7 +79,7 @@ /** * Implements the process method for the NodeProcessor interface. */ - public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + public Object process(Node nd, Deque stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { ASTNode pt = (ASTNode) nd; diff --git ql/src/test/org/apache/hadoop/hive/ql/lib/TestRuleRegExp.java ql/src/test/org/apache/hadoop/hive/ql/lib/TestRuleRegExp.java index f06d0df..6468420 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lib/TestRuleRegExp.java +++ ql/src/test/org/apache/hadoop/hive/ql/lib/TestRuleRegExp.java @@ -19,6 +19,8 @@ import static org.junit.Assert.*; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.List; import java.util.Stack; @@ -60,7 +62,7 @@ public void testPatternWithoutWildCardChar() { assertEquals(rule1.rulePatternIsValidWithoutWildCardChar(), true); assertEquals(rule1.rulePatternIsValidWithWildCardChar(), false); // positive test - Stack ns1 = new Stack(); + Deque ns1 = new ArrayDeque(); ns1.push(new TestNode(ReduceSinkOperator.getOperatorName())); ns1.push(new TestNode(SelectOperator.getOperatorName())); ns1.push(new TestNode(FileSinkOperator.getOperatorName())); @@ -70,7 +72,7 @@ public void testPatternWithoutWildCardChar() { fail(e.getMessage()); } // negative test - Stack ns2 = new Stack(); + Deque ns2 = new ArrayDeque(); ns2.push(new TestNode(ReduceSinkOperator.getOperatorName())); ns1.push(new TestNode(TableScanOperator.getOperatorName())); ns1.push(new TestNode(FileSinkOperator.getOperatorName())); @@ -91,10 +93,10 @@ public void testPatternWithWildCardChar() { assertEquals(rule1.rulePatternIsValidWithoutWildCardChar(), false); assertEquals(rule1.rulePatternIsValidWithWildCardChar(), true); // positive test - Stack ns1 = new Stack(); + Deque ns1 = new ArrayDeque(); ns1.push(new TestNode(TableScanOperator.getOperatorName())); ns1.push(new TestNode(FilterOperator.getOperatorName())); - Stack ns2 = new Stack(); + Deque ns2 = new ArrayDeque(); ns2.push(new TestNode(TableScanOperator.getOperatorName())); ns2.push(new TestNode(FileSinkOperator.getOperatorName())); try { @@ -104,7 +106,7 @@ public void testPatternWithWildCardChar() { fail(e.getMessage()); } // negative test - Stack ns3 = new Stack(); + Deque ns3 = new ArrayDeque(); ns3.push(new TestNode(ReduceSinkOperator.getOperatorName())); ns3.push(new TestNode(ReduceSinkOperator.getOperatorName())); ns3.push(new TestNode(FileSinkOperator.getOperatorName()));