diff --git ql/src/java/org/apache/hadoop/hive/ql/Context.java ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 9692738..a74bbbe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -23,7 +23,6 @@
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
@@ -86,6 +85,7 @@
   protected int pathid = 10000;
   protected boolean explain = false;
   protected String cboInfo;
+  protected boolean cboSucceeded;
   protected boolean explainLogical = false;
   protected String cmd = "";
   // number of previous attempts
@@ -706,4 +706,12 @@
   public void setCboInfo(String cboInfo) {
     this.cboInfo = cboInfo;
   }
+
+  public boolean isCboSucceeded() {
+    return cboSucceeded;
+  }
+
+  public void setCboSucceeded(boolean cboSucceeded) {
+    this.cboSucceeded = cboSucceeded;
+  }
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
index 433699b..e3d3ce6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
@@ -26,8 +26,10 @@
 
 import com.google.common.base.Predicates;
 import com.google.common.collect.Iterators;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -69,6 +71,16 @@ private static final Log LOG = LogFactory.getLog(IdentityProjectRemover.class);
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation;
+    //    if we do not meet them, we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(cboEnabled && returnPathEnabled && cboSucceeded) {
+      return pctx;
+    }
+
+    // 1. We apply the transformation
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
     opRules.put(new RuleRegExp("R1", "(" + SelectOperator.getOperatorName() + "%)"),
         new ProjectRemover());
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
index 95c2b0b..3006a6e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
@@ -28,6 +28,7 @@
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -57,6 +58,16 @@
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation;
+    //    if we do not meet them, we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(cboEnabled && returnPathEnabled && cboSucceeded) {
+      return pctx;
+    }
+
+    // 1. We apply the transformation
     String SEL = SelectOperator.getOperatorName();
     String FIL = FilterOperator.getOperatorName();
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
index 5afd4f2..5cb5e39 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
@@ -62,11 +62,9 @@ public void initialize(HiveConf hiveConf) {
 
     transformations = new ArrayList<Transform>();
 
-    // If we are translating Calcite operators into Hive operators, we need
-    // additional postprocessing
-    if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) {
-      transformations.add(new HiveOpConverterPostProc());
-    }
+    // Add the additional postprocessing transformations needed if
+    // we are translating Calcite operators into Hive operators.
+    transformations.add(new HiveOpConverterPostProc());
 
     // Add the transformation that computes the lineage information.
     transformations.add(new Generator());
@@ -149,11 +147,8 @@ public void initialize(HiveConf hiveConf) {
     if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTREDUCEDEDUPLICATION)) {
       transformations.add(new ReduceSinkDeDuplication());
     }
-    if(!HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) {
-      transformations.add(new NonBlockingOpDeDupProc());
-    }
-    if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEIDENTITYPROJECTREMOVER)
-        && !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) {
+    transformations.add(new NonBlockingOpDeDupProc());
+    if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEIDENTITYPROJECTREMOVER)) {
       transformations.add(new IdentityProjectRemover());
     }
     if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVELIMITOPTENABLE)) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
index fcfe658..5bb49fe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
@@ -26,6 +26,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -54,6 +55,15 @@
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation;
+    //    if we do not meet them, we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(!(cboEnabled && returnPathEnabled && cboSucceeded)) {
+      return pctx;
+    }
+
     // 1. Initialize aux data structures
     this.pctx = pctx;
     this.aliasToOpInfo = new HashMap<String, Operator<? extends OperatorDesc>>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 6a15bf6..38041f9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -263,6 +263,7 @@ Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticExcept
           sinkOp = genPlan(getQB());
           LOG.info("CBO Succeeded; optimized logical plan.");
           this.ctx.setCboInfo("Plan optimized by CBO.");
+          this.ctx.setCboSucceeded(true);
          LOG.debug(newAST.dump());
        }
      } catch (Exception e) {
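
For context: after this patch, IdentityProjectRemover, NonBlockingOpDeDupProc and HiveOpConverterPostProc all evaluate the same three-way condition, and the first two bail out exactly where the third does its work. The following minimal, self-contained sketch illustrates that shared guard. It is not Hive code: the class and method names (CboGuardSketch, onCboReturnPath) are hypothetical, and the three booleans stand in for HIVE_CBO_ENABLED, HIVE_CBO_RETPATH_HIVEOP and Context.isCboSucceeded().

public final class CboGuardSketch {

  // True when the operator tree came from the CBO return path: CBO is
  // enabled, the Hive-operator return path is enabled, and CBO succeeded.
  static boolean onCboReturnPath(boolean cboEnabled, boolean returnPathEnabled,
      boolean cboSucceeded) {
    return cboEnabled && returnPathEnabled && cboSucceeded;
  }

  public static void main(String[] args) {
    boolean onReturnPath = onCboReturnPath(true, true, true);

    // IdentityProjectRemover and NonBlockingOpDeDupProc return early when
    // the guard holds, since HiveOpConverterPostProc handles that case.
    System.out.println("run dedup / identity-project removal: " + !onReturnPath);

    // HiveOpConverterPostProc negates the guard for its early return, so it
    // only does its work on the return path and is a no-op otherwise.
    System.out.println("run HiveOpConverterPostProc: " + onReturnPath);
  }
}

Centralizing the condition in one predicate like this would avoid the three verbatim copies of the guard that the patch introduces; the copies keep the transforms dependency-free, at the cost of having to keep the three call sites in sync.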