diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 65744ac..207603a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -593,7 +593,7 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
     PrintStream ps = new PrintStream(baos);
     try {
       List<Task<? extends Serializable>> rootTasks = sem.getAllRootTasks();
-      task.getJSONPlan(ps, astTree, rootTasks, sem.getFetchTask(), false, true, true);
+      task.getJSONPlan(ps, rootTasks, sem.getFetchTask(), false, true, true);
       ret = baos.toString();
     } catch (Exception e) {
       LOG.warn("Exception generating explain output: " + e, e);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 4116141..4ce0864 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -39,8 +39,6 @@
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
@@ -50,7 +48,6 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -68,6 +65,8 @@
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * ExplainTask implementation.
@@ -139,18 +138,6 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E
       out = null;
     }
 
-    // Print out the parse AST
-    if (work.getAstStringTree() != null) {
-      String jsonAST = outputAST(work.getAstStringTree(), out, jsonOutput, 0);
-      if (out != null) {
-        out.println();
-      }
-
-      if (jsonOutput) {
-        outJSONObject.put("ABSTRACT SYNTAX TREE", jsonAST);
-      }
-    }
-
     if (work.getParseContext() != null) {
       if (out != null) {
         out.print("LOGICAL PLAN:");
@@ -172,11 +159,11 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E
 
   public JSONObject getJSONPlan(PrintStream out, ExplainWork work) throws Exception {
-    return getJSONPlan(out, work.getAstTree(), work.getRootTasks(), work.getFetchTask(),
+    return getJSONPlan(out, work.getRootTasks(), work.getFetchTask(),
         work.isFormatted(), work.getExtended(), work.isAppendTaskType());
   }
 
-  public JSONObject getJSONPlan(PrintStream out, ASTNode ast, List<Task<? extends Serializable>> tasks, Task<? extends Serializable> fetchTask,
+  public JSONObject getJSONPlan(PrintStream out, List<Task<? extends Serializable>> tasks, Task<? extends Serializable> fetchTask,
       boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception {
 
     // If the user asked for a formatted output, dump the json output
@@ -187,18 +174,6 @@ public JSONObject getJSONPlan(PrintStream out, ASTNode ast, List<Task<? extends Serializable>> tasks,
       out = null;
     }
 
-    // Print out the parse AST
-    if (ast != null && isExtended) {
-      String jsonAST = outputAST(ast.dump(), out, jsonOutput, 0);
-      if (out != null) {
-        out.println();
-      }
-
-      if (jsonOutput) {
-        outJSONObject.put("ABSTRACT SYNTAX TREE", jsonAST);
-      }
-    }
-
     List<Task> ordered = StageIDsRearranger.getExplainOrder(conf, tasks);
 
     if (fetchTask != null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index f490161..b7e70be 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -22,11 +22,8 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -39,6 +36,8 @@
 import org.apache.hadoop.yarn.client.api.TimelineClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
@@ -124,7 +123,6 @@ public void run() {
                   null,// pCtx
                   plan.getRootTasks(),// RootTasks
                   plan.getFetchTask(),// FetchTask
-                  null,// astStringTree
                   null,// analyzer
                   false,// extended
                   true,// formatted
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index b59347d..c069dc4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -297,8 +297,8 @@ Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticExcept
           LOG.info("CBO Succeeded; optimized logical plan.");
           this.ctx.setCboInfo("Plan optimized by CBO.");
           this.ctx.setCboSucceeded(true);
-          if (LOG.isDebugEnabled()) {
-            LOG.debug(newAST.dump());
+          if (LOG.isTraceEnabled()) {
+            LOG.trace(newAST.dump());
           }
         }
       } catch (Exception e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index e393be2..eefc145 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -24,8 +24,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -102,7 +102,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         pCtx,
         tasks,
         fetchTask,
-        input,
         sem,
         extended,
         formatted,
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
index 132cb8d..a213c83 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
@@ -26,7 +26,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 
@@ -40,8 +39,6 @@
   private Path resFile;
   private ArrayList<Task<? extends Serializable>> rootTasks;
  private Task<? extends Serializable> fetchTask;
-  private ASTNode astTree;
-  private String astStringTree;
   private HashSet<ReadEntity> inputs;
   private ParseContext pCtx;
 
@@ -65,7 +62,6 @@ public ExplainWork(Path resFile,
       ParseContext pCtx,
       List<Task<? extends Serializable>> rootTasks,
       Task<? extends Serializable> fetchTask,
-      ASTNode astTree,
       BaseSemanticAnalyzer analyzer,
       boolean extended,
       boolean formatted,
@@ -77,7 +73,6 @@ public ExplainWork(Path resFile,
     this.resFile = resFile;
     this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
     this.fetchTask = fetchTask;
-    this.astTree = astTree;
     this.analyzer = analyzer;
     if (analyzer != null) {
       this.inputs = analyzer.getInputs();
@@ -116,17 +111,6 @@ public void setFetchTask(Task<? extends Serializable> fetchTask) {
     this.fetchTask = fetchTask;
   }
 
-  public ASTNode getAstTree() {
-    return astTree;
-  }
-
-  public String getAstStringTree() {
-    if (astStringTree == null) {
-      astStringTree = astTree.dump();
-    }
-    return astStringTree;
-  }
-
   public HashSet<ReadEntity> getInputs() {
     return inputs;
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index d795324..8614d40 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -23,8 +23,6 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -42,6 +40,8 @@
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestUpdateDeleteSemanticAnalyzer {
 
@@ -59,7 +59,7 @@ public void testInsertSelect() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("insert into table T select a, b from U",
          "testInsertSelect");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
 
     } finally {
       cleanupTables();
@@ -70,7 +70,7 @@ public void testDeleteAllNonPartitioned() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from T", "testDeleteAllNonPartitioned");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -80,7 +80,7 @@ public void testDeleteWhereNoPartition() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from T where a > 5", "testDeleteWhereNoPartition");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -90,7 +90,7 @@ public void testDeleteAllPartitioned() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from U", "testDeleteAllPartitioned");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -100,7 +100,7 @@ public void testDeleteAllWherePartitioned() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from U where a > 5", "testDeleteAllWherePartitioned");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -111,7 +111,7 @@ public void testDeleteOnePartition() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from U where ds = 'today'",
          "testDeleteFromPartitionOnly");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -122,7 +122,7 @@ public void testDeleteOnePartitionWhere() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("delete from U where ds = 'today' and a > 5",
          "testDeletePartitionWhere");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -132,7 +132,7 @@ public void testUpdateAllNonPartitioned() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update T set b = 5", "testUpdateAllNonPartitioned");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -143,7 +143,7 @@ public void testUpdateAllNonPartitionedWhere() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update T set b = 5 where b > 5",
          "testUpdateAllNonPartitionedWhere");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -153,7 +153,7 @@ public void testUpdateAllPartitioned() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update U set b = 5", "testUpdateAllPartitioned");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -164,7 +164,7 @@ public void testUpdateAllPartitionedWhere() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update U set b = 5 where b > 5",
          "testUpdateAllPartitionedWhere");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -175,7 +175,7 @@ public void testUpdateOnePartition() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today'",
          "testUpdateOnePartition");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -186,7 +186,7 @@ public void testUpdateOnePartitionWhere() throws Exception {
     try {
       ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today' and b > 5",
          "testUpdateOnePartitionWhere");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
     } finally {
       cleanupTables();
     }
@@ -198,7 +198,7 @@ public void testInsertValues() throws Exception {
       ReturnInfo rc = parseAndAnalyze("insert into table T values ('abc', 3), ('ghi', null)",
          "testInsertValues");
-      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan));
 
     } finally {
       cleanupTables();
@@ -212,7 +212,7 @@ public void testInsertValuesPartitioned() throws Exception {
          "('abc', 3, 'today'), ('ghi', 5, 'tomorrow')",
          "testInsertValuesPartitioned");
-      LOG.info(explain((SemanticAnalyzer) rc.sem, rc.plan, rc.ast));
+      LOG.info(explain((SemanticAnalyzer) rc.sem, rc.plan));
 
     } finally {
       cleanupTables();
@@ -235,12 +235,10 @@ public void cleanupTables() throws HiveException {
   }
 
   private class ReturnInfo {
-    ASTNode ast;
     BaseSemanticAnalyzer sem;
     QueryPlan plan;
 
-    ReturnInfo(ASTNode a, BaseSemanticAnalyzer s, QueryPlan p) {
-      ast = a;
+    ReturnInfo(BaseSemanticAnalyzer s, QueryPlan p) {
       sem = s;
       plan = p;
     }
@@ -283,10 +281,10 @@ private ReturnInfo parseAndAnalyze(String query, String testName)
 
     QueryPlan plan = new QueryPlan(query, sem, 0L, testName, null, null);
 
-    return new ReturnInfo(tree, sem, plan);
+    return new ReturnInfo(sem, plan);
   }
 
-  private String explain(SemanticAnalyzer sem, QueryPlan plan, ASTNode astTree) throws
+  private String explain(SemanticAnalyzer sem, QueryPlan plan) throws
       IOException {
     FileSystem fs = FileSystem.get(conf);
     File f = File.createTempFile("TestSemanticAnalyzer", "explain");
@@ -294,7 +292,7 @@ private String explain(SemanticAnalyzer sem, QueryPlan plan, ASTNode astTree) th
     fs.create(tmp);
     fs.deleteOnExit(tmp);
     ExplainWork work = new ExplainWork(tmp, sem.getParseContext(), sem.getRootTasks(),
-        sem.getFetchTask(), astTree, sem, true, false, false, false, false, false, null);
+        sem.getFetchTask(), sem, true, false, false, false, false, false, null);
     ExplainTask task = new ExplainTask();
     task.setWork(work);
     task.initialize(conf, plan, null, null);