diff --git ql/src/java/org/apache/hadoop/hive/ql/Context.java ql/src/java/org/apache/hadoop/hive/ql/Context.java
index affaec8..68548c9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -87,6 +87,7 @@
   protected String cboInfo;
   protected boolean cboSucceeded;
   protected boolean explainLogical = false;
+  protected boolean explainExtended = false;
   protected String cmd = "";
   // number of previous attempts
   protected int tryCount = 0;
@@ -176,6 +177,22 @@ public void setExplainLogical(boolean explainLogical) {
   }
 
   /**
+   * Find whether the current query is an extended explain query.
+   */
+  public boolean getExplainExtended() {
+    return explainExtended;
+  }
+
+  /**
+   * Set the context on whether the current query is an extended
+   * explain query.
+   */
+  public void setExplainExtended(boolean explainExtended) {
+    this.explainExtended = explainExtended;
+  }
+
+
+  /**
    * Set the original query command.
    * @param cmd the original query command string
    */
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 2d365a9..8d65d79 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -67,6 +67,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
 
     ctx.setExplain(true);
     ctx.setExplainLogical(logical);
+    ctx.setExplainExtended(extended);
 
     // Create a semantic analyzer for the query
     ASTNode input = (ASTNode) ast.getChild(0);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 7e0e137..ab168e8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -158,9 +158,13 @@ private void runDynamicPartitionPruning(OptimizeSparkProcContext procCtx)
   private void runJoinOptimizations(OptimizeSparkProcContext procCtx) throws SemanticException {
     ParseContext pCtx = procCtx.getParseContext();
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
-    opRules.put(new RuleRegExp("Set parallelism - ReduceSink",
-        ReduceSinkOperator.getOperatorName() + "%"),
-        new SetSparkReducerParallelism());
+    // Do not set reducer parallelism when running EXPLAIN without EXTENDED,
+    // to avoid unnecessary Spark session creation.
+    if (!pCtx.getContext().getExplain() || pCtx.getContext().getExplainExtended()) {
+      opRules.put(new RuleRegExp("Set parallelism - ReduceSink",
+          ReduceSinkOperator.getOperatorName() + "%"),
+          new SetSparkReducerParallelism());
+    }
 
     opRules.put(new TypeRule(JoinOperator.class), new SparkJoinOptimizer(pCtx));
 
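
Note (reviewer sketch, not part of the patch): per the comment in the last hunk, SetSparkReducerParallelism may trigger Spark session creation while computing reducer parallelism, so the new guard in SparkCompiler.runJoinOptimizations() skips the rule for plain EXPLAIN while keeping it for regular queries and for EXPLAIN EXTENDED. A minimal standalone illustration of the guard's truth table follows; the class and method names here are hypothetical.

  // Standalone sketch, not part of the patch; names are hypothetical.
  public class ExplainGuardSketch {

    // Mirrors the guard added in SparkCompiler.runJoinOptimizations():
    //   !pCtx.getContext().getExplain() || pCtx.getContext().getExplainExtended()
    static boolean registersParallelismRule(boolean explain, boolean explainExtended) {
      return !explain || explainExtended;
    }

    public static void main(String[] args) {
      // Regular query: rule registered, parallelism computed as before.
      System.out.println(registersParallelismRule(false, false)); // true
      // Plain EXPLAIN: rule skipped, so no Spark session is created just to print a plan.
      System.out.println(registersParallelismRule(true, false));  // false
      // EXPLAIN EXTENDED: rule registered, so the printed plan reflects actual parallelism.
      System.out.println(registersParallelismRule(true, true));   // true
    }
  }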