diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
index fb12f70..4e8d759 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.common.jsonexplain.tez;
 
-import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -96,14 +95,12 @@ private void inlineJoinOp() throws Exception {
       }
     }
     // inline merge join operator in a self-join
-    else if (this.name.equals("Merge Join Operator")) {
+    else {
       if (this.vertex != null) {
         for (Vertex v : this.vertex.mergeJoinDummyVertexs) {
           TezJsonParser.addInline(this, new Connection(null, v));
         }
       }
-    } else {
-      throw new Exception("Unknown join operator");
     }
   }
@@ -123,23 +120,23 @@ private String getNameWithOpId() {
    * operator so that we can decide the corresponding indent.
    * @throws Exception
    */
-  public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoinOp)
+  public void print(List<Boolean> indentFlag, boolean branchOfJoinOp)
       throws Exception {
     // print name
     if (TezJsonParser.printSet.contains(this)) {
-      out.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
           + this.getNameWithOpId());
       return;
     }
     TezJsonParser.printSet.add(this);
     if (!branchOfJoinOp) {
-      out.println(TezJsonParser.prefixString(indentFlag) + this.getNameWithOpId());
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag) + this.getNameWithOpId());
     } else {
-      out.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.getNameWithOpId());
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.getNameWithOpId());
     }
     branchOfJoinOp = false;
-    // if this operator is a join operator
-    if (this.name.contains("Join")) {
+    // if this operator is a Map Join Operator or a Merge Join Operator
+    if (this.name.equals("Map Join Operator") || this.name.equals("Merge Join Operator")) {
       inlineJoinOp();
       branchOfJoinOp = true;
     }
@@ -167,7 +164,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoi
     }
     Collections.sort(attrs);
     for (Attr attr : attrs) {
-      out.println(TezJsonParser.prefixString(attFlag) + attr.toString());
+      PrintUtils.println(TezJsonParser.prefixString(attFlag) + attr.toString());
     }
     // print inline vertex
     if (TezJsonParser.inlineMap.containsKey(this)) {
@@ -185,7 +182,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoi
         else {
           vertexFlag.add(false);
         }
-        connection.from.print(out, vertexFlag, connection.type, this.vertex);
+        connection.from.print(vertexFlag, connection.type, this.vertex);
       }
     }
     // print parent op, i.e., where data comes from
@@ -193,7 +190,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoi
       List<Boolean> parentFlag = new ArrayList<>();
       parentFlag.addAll(indentFlag);
       parentFlag.add(false);
-      this.parent.print(out, parentFlag, branchOfJoinOp);
+      this.parent.print(parentFlag, branchOfJoinOp);
     }
     // print next vertex
     else {
@@ -206,7 +203,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoi
         } else {
           vertexFlag.add(false);
         }
-        v.print(out, vertexFlag, noninlined.get(index).type, this.vertex);
+        v.print(vertexFlag, noninlined.get(index).type, this.vertex);
       }
     }
   }
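Note on the call-chain change above: since print() no longer threads a PrintStream through the recursion, each level only passes the growing indentFlag list and writes through PrintUtils. The sketch below is illustrative only and is not part of the patch (Node and its children() helper are hypothetical); it shows the contract each recursive print call in Op, Stage and Vertex now follows.

    // Illustrative sketch, not the real Op class: each level copies the caller's
    // indent flags, appends one entry for the new nesting level, and writes
    // through the shared PrintUtils buffer instead of a PrintStream argument.
    void printNode(Node node, List<Boolean> indentFlag) throws Exception {
      PrintUtils.println(TezJsonParser.prefixString(indentFlag) + node.name);
      List<Boolean> childFlag = new ArrayList<>(indentFlag);
      childFlag.add(false);                 // same convention as parentFlag.add(false) above
      for (Node child : node.children()) {  // children() is a placeholder helper
        printNode(child, childFlag);
      }
    }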
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/PrintUtils.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/PrintUtils.java
new file mode 100644
index 0000000..c21ef41
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/PrintUtils.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+public class PrintUtils {
+  static StringBuilder builder;
+  static String lineSeparator;
+
+  public static void print(String string) {
+    builder.append(string);
+  }
+
+  public static void println(String string) {
+    builder.append(string);
+    builder.append(lineSeparator);
+  }
+
+  public static void println() {
+    builder.append(lineSeparator);
+  }
+}
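The new PrintUtils class keeps its output in static fields, so the buffer has to be re-initialized before every parse; TezJsonParser.print() further down does exactly that and flushes the buffer to the real PrintStream once at the end. A minimal sketch of that lifecycle, mirroring what the parser does (the sample lines and the use of System.out are placeholders):

    // Sketch of the buffer lifecycle that TezJsonParser.print() follows in this patch.
    PrintUtils.builder = new StringBuilder();                          // reset per explain
    PrintUtils.lineSeparator = System.getProperty("line.separator");
    PrintUtils.println("Stage-0");                                     // printers append lines
    PrintUtils.println("   Fetch Operator");
    System.out.println(PrintUtils.builder.toString());                 // single flush at the end

Worth noting, though not addressed by the patch: because these fields are static, two explains running concurrently in the same process would share one buffer.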
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
index 10e0a0c..680a3a6 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.common.jsonexplain.tez;
 
 import java.io.IOException;
-import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -147,11 +146,13 @@ public void extractVertex(JSONObject object) throws Exception {
       }
     } else {
       String[] names = JSONObject.getNames(object);
-      for (String name : names) {
-        if (name.contains("Operator")) {
-          this.op = extractOp(name, object.getJSONObject(name));
-        } else {
-          attrs.add(new Attr(name, object.get(name).toString()));
+      if (names != null) {
+        for (String name : names) {
+          if (name.contains("Operator")) {
+            this.op = extractOp(name, object.getJSONObject(name));
+          } else {
+            attrs.add(new Attr(name, object.get(name).toString()));
+          }
         }
       }
     }
@@ -217,37 +218,37 @@ private boolean isPrintable(Object val) {
     return false;
   }
 
-  public void print(PrintStream out, List<Boolean> indentFlag) throws JSONException, Exception {
+  public void print(List<Boolean> indentFlag) throws JSONException, Exception {
     // print stagename
     if (TezJsonParser.printSet.contains(this)) {
-      out.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
          + this.name);
       return;
     }
     TezJsonParser.printSet.add(this);
-    out.println(TezJsonParser.prefixString(indentFlag) + this.name);
+    PrintUtils.println(TezJsonParser.prefixString(indentFlag) + this.name);
     // print vertexes
     List<Boolean> nextIndentFlag = new ArrayList<>();
     nextIndentFlag.addAll(indentFlag);
     nextIndentFlag.add(false);
     for (Vertex candidate : this.vertexs.values()) {
       if (!TezJsonParser.isInline(candidate) && candidate.children.isEmpty()) {
-        candidate.print(out, nextIndentFlag, null, null);
+        candidate.print(nextIndentFlag, null, null);
       }
     }
     if (!attrs.isEmpty()) {
       Collections.sort(attrs);
       for (Attr attr : attrs) {
-        out.println(TezJsonParser.prefixString(nextIndentFlag) + attr.toString());
+        PrintUtils.println(TezJsonParser.prefixString(nextIndentFlag) + attr.toString());
       }
     }
     if (op != null) {
-      op.print(out, nextIndentFlag, false);
+      op.print(nextIndentFlag, false);
     }
     nextIndentFlag.add(false);
     // print dependent stages
     for (Stage stage : this.parentStages) {
-      stage.print(out, nextIndentFlag);
+      stage.print(nextIndentFlag);
     }
   }
 }
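The added null check in extractVertex() matters because org.json's JSONObject.getNames() returns null rather than an empty array when the object has no keys, so the old loop could hit a NullPointerException on an empty operator object. A small illustration of the library behavior the guard protects against:

    // Assumes org.json.JSONObject on the classpath.
    String[] names = JSONObject.getNames(new JSONObject());  // returns null, not new String[0]
    if (names != null) {
      for (String name : names) {
        // safe to inspect each key here
      }
    }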
stage"); for (Entry> entry : candidate.tezStageDependency.entrySet()) { StringBuilder sb = new StringBuilder(); sb.append(entry.getKey().name); @@ -137,18 +138,19 @@ public void print(JSONObject inputObject, PrintStream outputStream) throws Excep } sb.append(connection.from.name + " (" + connection.type + ")"); } - outputStream.println(sb.toString()); + PrintUtils.println(sb.toString()); } - outputStream.println(); + PrintUtils.println(); } } List indentFlag = new ArrayList<>(); // print out all the stages that have no childStages. for (Stage candidate : this.stages.values()) { if (candidate.childStages.isEmpty()) { - candidate.print(outputStream, indentFlag); + candidate.print(indentFlag); } } + outputStream.println(PrintUtils.builder.toString()); } public static void addInline(Op op, Connection connection) { @@ -161,10 +163,11 @@ public static void addInline(Op op, Connection connection) { list.add(connection); } } + public static boolean isInline(Vertex v) { - for(List list : inlineMap.values()){ + for (List list : inlineMap.values()) { for (Connection connection : list) { - if(connection.from.equals(v)){ + if (connection.from.equals(v)) { return true; } } diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java index 9b3405e..161c14d 100644 --- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java +++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.common.jsonexplain.tez; import java.io.IOException; -import java.io.PrintStream; import java.util.ArrayList; import java.util.List; @@ -101,7 +100,7 @@ else if (key.equals("Join:")) { mergeJoinDummyVertexs.add(v); } } else { - throw new Exception("unsupported operator tree in vertex " + this.name); + throw new Exception("Unsupported operator tree in vertex " + this.name); } } } @@ -171,24 +170,24 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json } } - public void print(PrintStream out, List indentFlag, String type, Vertex callingVertex) + public void print(List indentFlag, String type, Vertex callingVertex) throws JSONException, Exception { // print vertexname if (TezJsonParser.printSet.contains(this) && !hasMultiReduceOp) { if (type != null) { - out.println(TezJsonParser.prefixString(indentFlag, "|<-") + PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-") + " Please refer to the previous " + this.name + " [" + type + "]"); } else { - out.println(TezJsonParser.prefixString(indentFlag, "|<-") + PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-") + " Please refer to the previous " + this.name); } return; } TezJsonParser.printSet.add(this); if (type != null) { - out.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.name + " [" + type + "]"); + PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.name + " [" + type + "]"); } else if (this.name != null) { - out.println(TezJsonParser.prefixString(indentFlag) + this.name); + PrintUtils.println(TezJsonParser.prefixString(indentFlag) + this.name); } // print operators if (hasMultiReduceOp && !callingVertex.union) { @@ -200,7 +199,7 @@ public void print(PrintStream out, List indentFlag, String type, Vertex } } if (choose != null) { - choose.print(out, indentFlag, false); + choose.print(indentFlag, false); } else { throw new Exception("Can not find the right reduce output operator for vertex " + this.name); } @@ 
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
index 9b3405e..161c14d 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.common.jsonexplain.tez;
 
 import java.io.IOException;
-import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.List;
@@ -101,7 +100,7 @@ else if (key.equals("Join:")) {
           mergeJoinDummyVertexs.add(v);
         }
       } else {
-        throw new Exception("unsupported operator tree in vertex " + this.name);
+        throw new Exception("Unsupported operator tree in vertex " + this.name);
       }
     }
   }
@@ -171,24 +170,24 @@ Op extractOp(JSONObject operator) throws JSONException, JsonParseException, Json
     }
   }
 
-  public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex callingVertex)
+  public void print(List<Boolean> indentFlag, String type, Vertex callingVertex)
       throws JSONException, Exception {
     // print vertexname
     if (TezJsonParser.printSet.contains(this) && !hasMultiReduceOp) {
       if (type != null) {
-        out.println(TezJsonParser.prefixString(indentFlag, "|<-")
+        PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-")
             + " Please refer to the previous " + this.name + " [" + type + "]");
       } else {
-        out.println(TezJsonParser.prefixString(indentFlag, "|<-")
+        PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-")
            + " Please refer to the previous " + this.name);
       }
       return;
     }
     TezJsonParser.printSet.add(this);
     if (type != null) {
-      out.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.name + " [" + type + "]");
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.name + " [" + type + "]");
     } else if (this.name != null) {
-      out.println(TezJsonParser.prefixString(indentFlag) + this.name);
+      PrintUtils.println(TezJsonParser.prefixString(indentFlag) + this.name);
     }
     // print operators
     if (hasMultiReduceOp && !callingVertex.union) {
@@ -200,7 +199,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex
         }
       }
       if (choose != null) {
-        choose.print(out, indentFlag, false);
+        choose.print(indentFlag, false);
       } else {
         throw new Exception("Can not find the right reduce output operator for vertex " + this.name);
       }
@@ -208,9 +207,9 @@ public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex
       for (Op op : this.rootOps) {
         // dummy vertex is treated as a branch of a join operator
        if (this.dummy) {
-          op.print(out, indentFlag, true);
+          op.print(indentFlag, true);
        } else {
-          op.print(out, indentFlag, false);
+          op.print(indentFlag, false);
        }
      }
    }
@@ -225,7 +224,7 @@ public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex
        } else {
          unionFlag.add(false);
        }
-        connection.from.print(out, unionFlag, connection.type, this);
+        connection.from.print(unionFlag, connection.type, this);
      }
    }
  }
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 27f68df..503d718 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1705,7 +1705,7 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
     HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
         "Whether to log explain output for every query.\n"
         + "When enabled, will log EXPLAIN EXTENDED output for the query at INFO log4j log level."),
-    HIVE_EXPLAIN_USER("hive.explain.user", false,
+    HIVE_EXPLAIN_USER("hive.explain.user", true,
         "Whether to show explain result at user level.\n"
         + "When enabled, will log EXPLAIN output for the query at user level."),
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index b9f39fb..9e8ea65 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -321,6 +321,7 @@ minitez.query.files=bucket_map_join_tez1.q,\
   dynamic_partition_pruning_2.q,\
   explainuser_1.q,\
   explainuser_2.q,\
+  explainuser_3.q,\
   hybridgrace_hashjoin_1.q,\
   hybridgrace_hashjoin_2.q,\
   mapjoin_decimal.q,\
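With the HiveConf change above flipping hive.explain.user to true by default, a plain EXPLAIN on the Tez engine now produces the user-level tree shown in the new golden file. The sketch below is one way to restore the old behavior programmatically for a session (conf is assumed to be the session's HiveConf; the same effect comes from "set hive.explain.user=false" in the CLI):

    // Equivalent of "set hive.explain.user=false;" from Java, assuming a HiveConf instance.
    conf.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, false);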
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 35c4cfc..cdac9e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -39,6 +39,8 @@
 import java.util.Set;
 import java.util.TreeMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
@@ -49,7 +51,6 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -76,9 +77,11 @@ public static final String EXPL_COLUMN_NAME = "Explain";
   private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
   private boolean isLogical = false;
+  protected final Log LOG;
 
   public ExplainTask() {
     super();
+    LOG = LogFactory.getLog(this.getClass().getName());
   }
 
   /*
@@ -288,28 +291,27 @@ public int execute(DriverContext driverContext) {
         JSONObject jsonDependencies = getJSONDependencies(work);
         out.print(jsonDependencies);
       } else {
-        if (work.getDependency()) {
-          JSONObject jsonDependencies = getJSONDependencies(work);
-          out.print(jsonDependencies);
+        if (work.isUserLevelExplain()) {
+          // Because of the implementation of the JsonParserFactory, we are sure
+          // that we can get a TezJsonParser.
+          JsonParser jsonParser = JsonParserFactory.getParser(conf);
+          work.setFormatted(true);
+          JSONObject jsonPlan = getJSONPlan(out, work);
+          if (work.getCboInfo() != null) {
+            jsonPlan.put("cboInfo", work.getCboInfo());
+          }
+          try {
+            jsonParser.print(jsonPlan, out);
+          } catch (Exception e) {
+            // if anything goes wrong, we bail out.
+            LOG.info("Running explain user level has problem: " + e.toString());
+            work.setFormatted(false);
+            jsonPlan = getJSONPlan(out, work);
+          }
         } else {
-          if (work.isUserLevelExplain()) {
-            JsonParser jsonParser = JsonParserFactory.getParser(conf);
-            if (jsonParser != null) {
-              work.setFormatted(true);
-              JSONObject jsonPlan = getJSONPlan(out, work);
-              if (work.getCboInfo() != null) {
-                jsonPlan.put("cboInfo", work.getCboInfo());
-              }
-              jsonParser.print(jsonPlan, out);
-            } else {
-              throw new SemanticException(
-                  "Hive UserLevelExplain only supports tez engine right now.");
-            }
-          } else {
-            JSONObject jsonPlan = getJSONPlan(out, work);
-            if (work.isFormatted()) {
-              out.print(jsonPlan);
-            }
+          JSONObject jsonPlan = getJSONPlan(out, work);
+          if (work.isFormatted()) {
+            out.print(jsonPlan);
          }
        }
      }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 3fbc8de..66d1546 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -91,8 +91,13 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       pCtx = ((SemanticAnalyzer)sem).getParseContext();
     }
 
-    boolean userLevelExplain = !extended && !formatted && !dependency && !logical && !authorize
-        && HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER);
+    boolean userLevelExplain = !extended
+        && !formatted
+        && !dependency
+        && !logical
+        && !authorize
+        && (HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER) && HiveConf
+            .getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez"));
     ExplainWork work = new ExplainWork(ctx.getResFile(),
         pCtx,
         tasks,
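Taken together, the two changes above mean user-level explain is only attempted when no other explain flavor was requested and the session runs on Tez, and a parser failure silently degrades to the ordinary JSON plan instead of throwing. A condensed restatement of the new control flow, using the same names as the patch (not a drop-in replacement):

    // Gate computed in ExplainSemanticAnalyzer:
    boolean userLevelExplain = !extended && !formatted && !dependency && !logical && !authorize
        && HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER)
        && "tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
    // In ExplainTask.execute(), a failure in jsonParser.print() flips work.setFormatted(false)
    // and falls back to getJSONPlan(out, work).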
diff --git a/ql/src/test/queries/clientpositive/explainuser_3.q b/ql/src/test/queries/clientpositive/explainuser_3.q
new file mode 100644
index 0000000..54548a0
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/explainuser_3.q
@@ -0,0 +1,81 @@
+explain select key, value
+FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol;
+
+explain show tables;
+
+explain create database newDB location "/tmp/";
+
+create database newDB location "/tmp/";
+
+explain describe database extended newDB;
+
+describe database extended newDB;
+
+explain use newDB;
+
+use newDB;
+
+create table tab (name string);
+
+explain alter table tab rename to newName;
+
+explain drop table tab;
+
+drop table tab;
+
+explain use default;
+
+use default;
+
+drop database newDB;
+
+explain analyze table src compute statistics;
+
+explain analyze table src compute statistics for columns;
+
+explain
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
+
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
+
+EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1;
+explain DROP TEMPORARY MACRO SIGMOID;
+DROP TEMPORARY MACRO SIGMOID;
+
+explain create table src_autho_test as select * from src;
+create table src_autho_test as select * from src;
+
+set hive.security.authorization.enabled=true;
+
+explain grant select on table src_autho_test to user hive_test_user;
+grant select on table src_autho_test to user hive_test_user;
+
+explain show grant user hive_test_user on table src_autho_test;
+explain show grant user hive_test_user on table src_autho_test(key);
+
+select key from src_autho_test order by key limit 20;
+
+explain revoke select on table src_autho_test from user hive_test_user;
+
+explain grant select(key) on table src_autho_test to user hive_test_user;
+
+explain revoke select(key) on table src_autho_test from user hive_test_user;
+
+explain
+create role sRc_roLE;
+
+create role sRc_roLE;
+
+explain
+grant role sRc_roLE to user hive_test_user;
+
+grant role sRc_roLE to user hive_test_user;
+
+explain show role grant user hive_test_user;
+
+explain drop role sRc_roLE;
+drop role sRc_roLE;
+
+set hive.security.authorization.enabled=false;
+drop table src_autho_test;
+
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
new file mode 100644
index 0000000..56eb32d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -0,0 +1,394 @@
+PREHOOK: query: explain select key, value
+FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select key, value
+FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
+POSTHOOK: type: QUERY
+Plan not optimized by CBO.
+
+Stage-0
+   Fetch Operator
+      limit:-1
+      Select Operator [SEL_6]
+         outputColumnNames:["_col0","_col1"]
+         Lateral View Join Operator [LVJ_5]
+            outputColumnNames:["_col0","_col1","_col7"]
+            Select Operator [SEL_2]
+               outputColumnNames:["key","value"]
+               Lateral View Forward [LVF_1]
+                  TableScan [TS_0]
+                     alias:srcpart
+      Select Operator [SEL_6]
+         outputColumnNames:["_col0","_col1"]
+         Lateral View Join Operator [LVJ_5]
+            outputColumnNames:["_col0","_col1","_col7"]
+            UDTF Operator [UDTF_4]
+               function name:explode
+               Select Operator [SEL_3]
+                  outputColumnNames:["_col0"]
+                  Please refer to the previous Lateral View Forward [LVF_1]
+
+PREHOOK: query: explain show tables
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: explain show tables
+POSTHOOK: type: SHOWTABLES
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+         Show Table Operator:
+            database name:default
+
+#### A masked pattern was here ####
+PREHOOK: type: CREATEDATABASE
+#### A masked pattern was here ####
+POSTHOOK: type: CREATEDATABASE
+Stage-0
+
+#### A masked pattern was here ####
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:newDB
+#### A masked pattern was here ####
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:newDB
+#### A masked pattern was here ####
+PREHOOK: query: explain describe database extended newDB
+PREHOOK: type: DESCDATABASE
+POSTHOOK: query: explain describe database extended newDB
+POSTHOOK: type: DESCDATABASE
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: describe database extended newDB
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:newdb
+POSTHOOK: query: describe database extended newDB
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:newdb
+newdb	location/in/test	hive_test_user	USER
+PREHOOK: query: explain use newDB
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain use newDB
+POSTHOOK: type: SWITCHDATABASE
+Stage-0
+
+PREHOOK: query: use newDB
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:newdb
+POSTHOOK: query: use newDB
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:newdb
+PREHOOK: query: create table tab (name string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:newdb
+PREHOOK: Output: newDB@tab
+POSTHOOK: query: create table tab (name string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:newdb
+POSTHOOK: Output: newDB@tab
+PREHOOK: query: explain alter table tab rename to newName
+PREHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: query: explain alter table tab rename to newName
+POSTHOOK: type: ALTERTABLE_RENAME
+Stage-0
+   Alter Table Operator:
+      new name:newDB.newName
+      old name:newDB.tab
+      type:rename
+
+PREHOOK: query: explain drop table tab
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: explain drop table tab
+POSTHOOK: type: DROPTABLE
+Stage-0
+   Drop Table Operator:
+      table:tab
+
+PREHOOK: query: drop table tab
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: newdb@tab
+PREHOOK: Output: newdb@tab
+POSTHOOK: query: drop table tab
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: newdb@tab
+POSTHOOK: Output: newdb@tab
+PREHOOK: query: explain use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain use default
+POSTHOOK: type: SWITCHDATABASE
+Stage-0
+
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop database newDB
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:newdb
+PREHOOK: Output: database:newdb
+POSTHOOK: query: drop database newDB
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:newdb
+POSTHOOK: Output: database:newdb
+PREHOOK: query: explain analyze table src compute statistics
+PREHOOK: type: QUERY
+POSTHOOK: query: explain analyze table src compute statistics
+POSTHOOK: type: QUERY
+Stage-2
+   Stats-Aggr Operator
+      Stage-0
+         Map 1
+            TableScan [TS_0]
+               alias:src
+               Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+
+PREHOOK: query: explain analyze table src compute statistics for columns
+PREHOOK: type: QUERY
+POSTHOOK: query: explain analyze table src compute statistics for columns
+POSTHOOK: type: QUERY
+Vertex dependency in root stage
+Reducer 2 <- Map 1 (SIMPLE_EDGE)
+
+Stage-2
+   Column Stats Work{}
+      Stage-0
+         Reducer 2
+            File Output Operator [FS_6]
+               compressed:false
+               Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+               table:{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}
+               Group By Operator [GBY_4]
+               |  aggregations:["compute_stats(VALUE._col0)","compute_stats(VALUE._col1)"]
+               |  outputColumnNames:["_col0","_col1"]
+               |  Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+               |<-Map 1 [SIMPLE_EDGE]
+                  Reduce Output Operator [RS_3]
+                     sort order:
+                     Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                     value expressions:_col0 (type: struct), _col1 (type: struct)
+                     Group By Operator [GBY_2]
+                        aggregations:["compute_stats(key, 16)","compute_stats(value, 16)"]
+                        outputColumnNames:["_col0","_col1"]
+                        Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                        Select Operator [SEL_1]
+                           outputColumnNames:["key","value"]
+                           Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                           TableScan [TS_0]
+                              alias:src
+                              Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+
+PREHOOK: query: explain
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: explain
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+POSTHOOK: type: CREATEMACRO
+Stage-0
+
+PREHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+PREHOOK: type: CREATEMACRO
+PREHOOK: Output: database:default
+POSTHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+POSTHOOK: type: CREATEMACRO
+POSTHOOK: Output: database:default
+PREHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+Plan optimized by CBO.
+
+Stage-0
+   Fetch Operator
+      limit:1
+      Limit [LIM_2]
+         Number of rows:1
+         Select Operator [SEL_1]
+            outputColumnNames:["_col0"]
+            TableScan [TS_0]
+               alias:src
+
+PREHOOK: query: explain DROP TEMPORARY MACRO SIGMOID
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: explain DROP TEMPORARY MACRO SIGMOID
+POSTHOOK: type: DROPMACRO
+Stage-0
+
+PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
+PREHOOK: type: DROPMACRO
+PREHOOK: Output: database:default
+POSTHOOK: query: DROP TEMPORARY MACRO SIGMOID
+POSTHOOK: type: DROPMACRO
+POSTHOOK: Output: database:default
+PREHOOK: query: explain create table src_autho_test as select * from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain create table src_autho_test as select * from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+Plan optimized by CBO.
+
+Stage-3
+   Stats-Aggr Operator
+      Stage-4
+         Create Table Operator:
+            columns:["key string","value string"]
+            input format:org.apache.hadoop.mapred.TextInputFormat
+            name:default.src_autho_test
+            output format:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+      Stage-2
+         Dependency Collection{}
+            Stage-1
+               Map 1
+                  File Output Operator [FS_2]
+                     compressed:false
+                     Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                     table:{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","name:":"default.src_autho_test","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}
+                     Select Operator [SEL_1]
+                        outputColumnNames:["_col0","_col1"]
+                        Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        TableScan [TS_0]
+                           alias:src
+                           Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+      Stage-0
+         Move Operator
+            Please refer to the previous Stage-1
+
+PREHOOK: query: create table src_autho_test as select * from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: create table src_autho_test as select * from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: explain grant select on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: explain grant select on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+Stage-0
+
+PREHOOK: query: grant select on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: grant select on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: explain show grant user hive_test_user on table src_autho_test
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: explain show grant user hive_test_user on table src_autho_test
+POSTHOOK: type: SHOW_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: explain show grant user hive_test_user on table src_autho_test(key)
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: explain show grant user hive_test_user on table src_autho_test(key)
+POSTHOOK: type: SHOW_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: select key from src_autho_test order by key limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_autho_test
+#### A masked pattern was here ####
+POSTHOOK: query: select key from src_autho_test order by key limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_autho_test
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+100
+103
+103
+104
+104
+105
+11
+111
+113
+113
+114
+116
+118
+118
+119
+PREHOOK: query: explain revoke select on table src_autho_test from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: explain revoke select on table src_autho_test from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
+Stage-0
+
+PREHOOK: query: explain grant select(key) on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: explain grant select(key) on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+Stage-0
+
+PREHOOK: query: explain revoke select(key) on table src_autho_test from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: explain revoke select(key) on table src_autho_test from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
+Stage-0
+
+PREHOOK: query: explain
+create role sRc_roLE
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: explain
+create role sRc_roLE
+POSTHOOK: type: CREATEROLE
+Stage-0
+
+PREHOOK: query: create role sRc_roLE
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: create role sRc_roLE
+POSTHOOK: type: CREATEROLE
+PREHOOK: query: explain
+grant role sRc_roLE to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: explain
+grant role sRc_roLE to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+Stage-0
+
+PREHOOK: query: grant role sRc_roLE to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: grant role sRc_roLE to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+PREHOOK: query: explain show role grant user hive_test_user
+PREHOOK: type: SHOW_ROLE_GRANT
+POSTHOOK: query: explain show role grant user hive_test_user
+POSTHOOK: type: SHOW_ROLE_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: explain drop role sRc_roLE
+PREHOOK: type: DROPROLE
+POSTHOOK: query: explain drop role sRc_roLE
+POSTHOOK: type: DROPROLE
+Stage-0
+
+PREHOOK: query: drop role sRc_roLE
+PREHOOK: type: DROPROLE
+POSTHOOK: query: drop role sRc_roLE
+POSTHOOK: type: DROPROLE
+PREHOOK: query: drop table src_autho_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@src_autho_test
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: drop table src_autho_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@src_autho_test
+POSTHOOK: Output: default@src_autho_test