diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
index fcfe658..1b9e1d7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
@@ -26,9 +26,12 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.ForwardWalker;
@@ -63,6 +66,9 @@ public ParseContext transform(ParseContext pctx) throws SemanticException {
     // 2. Trigger transformation
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
     opRules.put(new RuleRegExp("R1", JoinOperator.getOperatorName() + "%"), new JoinAnnotate());
+    opRules.put(new RuleRegExp("R2", TableScanOperator.getOperatorName() + "%"), new TableScanAnnotate());
+    opRules.put(new RuleRegExp("R3", FilterOperator.getOperatorName() + "%"), new FilterAnnotate());
+    opRules.put(new RuleRegExp("R4", SelectOperator.getOperatorName() + "%"), new SelectAnnotate());
     Dispatcher disp = new DefaultRuleDispatcher(new DefaultAnnotate(), opRules, null);
     GraphWalker ogw = new ForwardWalker(disp);
 
@@ -112,6 +118,60 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
     }
   }
 
+  private class TableScanAnnotate implements NodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      TableScanOperator tableScanOp = (TableScanOperator) nd;
+
+      // 1. Get alias from topOps
+      String opAlias = null;
+      for (Map.Entry<String, Operator<? extends OperatorDesc>> topOpEntry : pctx.getTopOps().entrySet()) {
+        if (topOpEntry.getValue() == tableScanOp) {
+          opAlias = topOpEntry.getKey();
+        }
+      }
+
+      assert opAlias != null;
+
+      // 2. Add alias to 1) aliasToOpInfo and 2) opToAlias
+      aliasToOpInfo.put(opAlias, tableScanOp);
+      opToAlias.put(tableScanOp.toString(), opAlias);
+
+      return null;
+    }
+  }
+
+  private class FilterAnnotate implements NodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      Operator<?> op = (Operator<?>) nd;
+      propagateAlias(op);
+      return null;
+    }
+  }
+
+  private class SelectAnnotate implements NodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      Operator<?> op = (Operator<?>) nd;
+      propagateAlias(op);
+      return null;
+    }
+  }
+
+  private void propagateAlias(Operator<?> op) {
+    assert op.getParentOperators().size() == 1;
+
+    final String opAlias = opToAlias.get(op.getParentOperators().get(0).toString());
+    opToAlias.put(op.toString(), opAlias);
+  }
+
   private class DefaultAnnotate implements NodeProcessor {
 
     @Override
@@ -121,7 +181,6 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
 
       // 1. Generate self alias
       final String opAlias = genUniqueAlias();
-      aliasToOpInfo.put(opAlias, op);
       opToAlias.put(op.toString(), opAlias);
 
       return null;