diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index b4bed7f..cf4d1c0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -98,6 +98,7 @@
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -532,7 +533,7 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
     PrintStream ps = new PrintStream(baos);
     try {
       List<Task<? extends Serializable>> rootTasks = sem.getRootTasks();
-      task.getJSONPlan(ps, astStringTree, rootTasks, sem.getFetchTask(), false, true, true);
+      task.getJSONPlan(ps, astStringTree, rootTasks, sem.getFetchTask(), false, Level.EXTENDED, true);
       ret = baos.toString();
     } catch (Exception e) {
       LOG.warn("Exception generating explain output: " + e, e);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 149f911..507d624 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -48,6 +48,7 @@
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -148,7 +149,7 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E
       out.print("LOGICAL PLAN:");
     }
     JSONObject jsonPlan = outputMap(work.getParseContext().getTopOps(), true,
-        out, jsonOutput, work.getExtended(), 0);
+        out, work.getLevel(), jsonOutput, 0);
     if (out != null) {
       out.println();
     }
@@ -165,11 +166,11 @@ public JSONObject getJSONLogicalPlan(PrintStream out, ExplainWork work) throws E
 
   public JSONObject getJSONPlan(PrintStream out, ExplainWork work) throws Exception {
     return getJSONPlan(out, work.getAstStringTree(), work.getRootTasks(), work.getFetchTask(),
-        work.isFormatted(), work.getExtended(), work.isAppendTaskType());
+        work.isFormatted(), work.getLevel(), work.isAppendTaskType());
   }
 
   public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<? extends Serializable>> tasks, Task<? extends Serializable> fetchTask,
-      boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception {
+      boolean jsonOutput, Level level, boolean appendTaskType) throws Exception {
 
     // If the user asked for a formatted output, dump the json output
     // in the output stream
@@ -180,7 +181,7 @@ public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<? extends Serializable>> tasks,
     }
 
     // Print out the parse AST
-    if (ast != null && isExtended) {
+    if (ast != null && level.ge(Level.DEFAULT)) {
       String jsonAST = outputAST(ast, out, jsonOutput, 0);
       if (out != null) {
         out.println();
@@ -213,7 +214,7 @@ public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<? extends Serializable>> tasks,
 
     // Go over all the tasks and dump out the plans
     JSONObject jsonPlan = outputStagePlans(out, ordered,
-        jsonOutput, isExtended);
+        jsonOutput, level);
 
     if (jsonOutput) {
       outJSONObject.put("STAGE PLANS", jsonPlan);
@@ -374,7 +375,7 @@ private static String indentString(int indent) {
   }
 
   private JSONObject outputMap(Map<?, ?> mp, boolean hasHeader, PrintStream out,
-      boolean extended, boolean jsonOutput, int indent) throws Exception {
+      Level level, boolean jsonOutput, int indent) throws Exception {
 
     TreeMap<Object, Object> tree = new TreeMap<Object, Object>();
     tree.putAll(mp);
@@ -482,7 +483,7 @@ else if (ent.getValue() != null) {
           out.println();
         }
         JSONObject jsonOut = outputPlan(ent.getValue(), out,
-            extended, jsonOutput, jsonOutput ? 0 : indent + 2);
+            level, jsonOutput, jsonOutput ? 0 : indent + 2);
         if (jsonOutput) {
           json.put(ent.getKey().toString(), jsonOut);
         }
@@ -498,7 +499,7 @@ else if (ent.getValue() != null) {
   }
 
   private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader,
-      boolean extended, boolean jsonOutput, int indent) throws Exception {
+      Level level, boolean jsonOutput, int indent) throws Exception {
 
     boolean first_el = true;
     boolean nl = false;
@@ -521,7 +522,7 @@ private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader,
       if (first_el && (out != null) && hasHeader) {
         out.println();
       }
-      JSONObject jsonOut = outputPlan(o, out, extended,
+      JSONObject jsonOut = outputPlan(o, out, level,
           jsonOutput, jsonOutput ? 0 : (hasHeader ? indent + 2 : indent));
       if (jsonOutput) {
         outputArray.put(jsonOut);
@@ -553,20 +554,20 @@ private boolean isPrintable(Object val) {
   }
 
   private JSONObject outputPlan(Object work,
-      PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception {
-    return outputPlan(work, out, extended, jsonOutput, indent, "");
+      PrintStream out, Level level, boolean jsonOutput, int indent) throws Exception {
+    return outputPlan(work, out, level, jsonOutput, indent, "");
   }
 
   private JSONObject outputPlan(Object work, PrintStream out,
-      boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
+      Level level, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
 
     // Check if work has an explain annotation
     Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
 
     String keyJSONObject = null;
-
+
     if (note instanceof Explain) {
       Explain xpl_note = (Explain) note;
-      if (extended || xpl_note.normalExplain()) {
+      if (level.ge(xpl_note.explainLevel())) {
         keyJSONObject = xpl_note.displayName();
         if (out != null) {
           out.print(indentString(indent));
@@ -587,7 +588,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
           (Operator<? extends OperatorDesc>) work;
       if (operator.getConf() != null) {
         String appender = isLogical ? " (" + operator.getOperatorId() + ")" : "";
-        JSONObject jsonOut = outputPlan(operator.getConf(), out, extended,
+        JSONObject jsonOut = outputPlan(operator.getConf(), out, level,
             jsonOutput, jsonOutput ? 0 : indent, appender);
         if (jsonOutput) {
           json = jsonOut;
@@ -599,7 +600,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
       if (operator.getChildOperators() != null) {
         int cindent = jsonOutput ? 0 : indent + 2;
         for (Operator<? extends OperatorDesc> op : operator.getChildOperators()) {
-          JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent);
+          JSONObject jsonOut = outputPlan(op, out, level, jsonOutput, cindent);
           if (jsonOutput) {
             ((JSONObject)json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut);
           }
@@ -624,7 +625,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
 
       if (note instanceof Explain) {
         Explain xpl_note = (Explain) note;
-        if (extended || xpl_note.normalExplain()) {
+        if (level.ge(xpl_note.explainLevel())) {
 
           Object val = null;
           try {
@@ -685,7 +686,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
               out.print(header);
             }
 
-            JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind);
+            JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, level, jsonOutput, ind);
             if (jsonOutput && !mp.isEmpty()) {
               json.put(header, jsonOut);
             }
@@ -699,7 +700,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
               out.print(header);
             }
 
-            JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind);
+            JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, level, jsonOutput, ind);
 
             if (jsonOutput && !l.isEmpty()) {
               json.put(header, jsonOut);
@@ -713,7 +714,7 @@ private JSONObject outputPlan(Object work, PrintStream out,
             if (!skipHeader && out != null) {
               out.println(header);
             }
-            JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind);
+            JSONObject jsonOut = outputPlan(val, out, level, jsonOutput, ind);
             if (jsonOutput) {
               if (!skipHeader) {
                 json.put(header, jsonOut);
@@ -762,7 +763,7 @@ private boolean shouldPrint(Explain exp, Object val) {
   }
 
   private JSONObject outputPlan(Task<? extends Serializable> task,
-      PrintStream out, JSONObject parentJSON, boolean extended,
+      PrintStream out, JSONObject parentJSON, Level level,
       boolean jsonOutput, int indent) throws Exception {
 
     if (out != null) {
@@ -774,7 +775,7 @@ private JSONObject outputPlan(Task<? extends Serializable> task,
 
     // Start by getting the work part of the task and call the output plan for
     // the work
-    JSONObject jsonOutputPlan = outputPlan(task.getWork(), out, extended,
+    JSONObject jsonOutputPlan = outputPlan(task.getWork(), out, level,
         jsonOutput, jsonOutput ? 0 : indent + 2);
 
     if (out != null) {
@@ -909,7 +910,7 @@ public JSONObject outputDependencies(PrintStream out, boolean jsonOutput,
   }
 
   public JSONObject outputStagePlans(PrintStream out, List<Task> tasks,
-      boolean jsonOutput, boolean isExtended)
+      boolean jsonOutput, Level level)
       throws Exception {
 
     if (out != null) {
@@ -918,7 +919,7 @@ public JSONObject outputStagePlans(PrintStream out, List<Task> tasks,
 
     JSONObject json = jsonOutput ? new JSONObject() : null;
     for (Task task : tasks) {
-      outputPlan(task, out, json, isExtended, jsonOutput, 2);
+      outputPlan(task, out, json, level, jsonOutput, 2);
     }
     return jsonOutput ? json : null;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index 513a2fa..8b46318 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@@ -115,7 +116,7 @@ public void run() {
               String query = plan.getQueryStr();
               List<Task<? extends Serializable>> rootTasks = plan.getRootTasks();
               JSONObject explainPlan = explain.getJSONPlan(null, null, rootTasks,
-                  plan.getFetchTask(), true, false, false);
+                  plan.getFetchTask(), true, Level.DEFAULT, false);
               fireAndForget(conf, createPreHookEvent(queryId, query,
                   explainPlan, queryStartTime, user, numMrJobs, numTezJobs, opId));
               break;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
index 095afd4..b7d95fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.mapred.Mapper;
@@ -88,7 +89,7 @@ public boolean isGatheringStats() {
   /**
    * @return the aggKey
    */
-  @Explain(displayName = "Stats Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Stats Aggregation Key Prefix", explainLevel = Level.EXTENDED)
   public String getAggKey() {
     return aggKey;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 38b6d96..8723af9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 
 /**
@@ -45,7 +46,7 @@ public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
 
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    boolean extended = false;
+    Level level = Level.DEFAULT;
     boolean formatted = false;
     boolean dependency = false;
     boolean logical = false;
@@ -55,7 +56,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       if (explainOptions == HiveParser.KW_FORMATTED) {
         formatted = true;
       } else if (explainOptions == HiveParser.KW_EXTENDED) {
-        extended = true;
+        level = Level.EXTENDED;
       } else if (explainOptions == HiveParser.KW_DEPENDENCY) {
         dependency = true;
       } else if (explainOptions == HiveParser.KW_LOGICAL) {
@@ -97,7 +98,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         fetchTask,
         input.dump(),
         sem,
-        extended,
+        level,
         formatted,
         dependency,
         logical,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
index f436bc0..6f5f221 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.ql.exec.BucketMatcher;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * was inner class of MapreLocalWork. context for bucket mapjoin (or smb join)
@@ -130,7 +131,7 @@ public void setBucketMatcherClass(
     this.bucketMatcherClass = bucketMatcherClass;
   }
 
-  @Explain(displayName = "Alias Bucket File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket File Name Mapping", explainLevel = Level.EXTENDED)
   public Map<String, Map<String, List<String>>> getAliasBucketFileNameMapping() {
     return aliasBucketFileNameMapping;
   }
@@ -149,7 +150,7 @@ public String toString() {
     }
   }
 
-  @Explain(displayName = "Alias Bucket Base File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket Base File Name Mapping", explainLevel = Level.EXTENDED)
   public Map<String, Map<String, List<String>>> getAliasBucketBaseFileNameMapping() {
     return aliasBucketBaseFileNameMapping;
   }
@@ -159,7 +160,7 @@ public void setAliasBucketBaseFileNameMapping(
     this.aliasBucketBaseFileNameMapping = aliasBucketBaseFileNameMapping;
   }
 
-  @Explain(displayName = "Alias Bucket Output File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket Output File Name Mapping", explainLevel = Level.EXTENDED)
   public Map<String, Integer> getBucketFileNameMapping() {
     return bucketFileNameMapping;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
index a44c8e8..90db41c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
@@ -20,6 +20,8 @@
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * Contains the information needed to persist column level statistics
  */
@@ -51,7 +53,7 @@ public void setTableName(String tableName) {
     this.tableName = tableName;
   }
 
-  @Explain(displayName = "Is Table Level Stats", normalExplain=false)
+  @Explain(displayName = "Is Table Level Stats", explainLevel = Level.EXTENDED)
   public boolean isTblLevel() {
     return isTblLevel;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
index 3c0ed2a..62fa6bd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * DescDatabaseDesc.
@@ -92,7 +93,7 @@ public void setDatabaseName(String db) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
index 814ad73..16f2c02 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * DescFunctionDesc.
@@ -98,7 +99,7 @@ public void setName(String name) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
index eefd4d4..e932a18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
@@ -22,6 +22,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * DescTableDesc.
@@ -178,7 +179,7 @@ public void setPartSpecs(Map<String, String> partSpec) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
index a3408a0..51bb607 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
@@ -20,6 +20,8 @@
 
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Explain.
@@ -27,9 +29,20 @@
  */
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Explain {
+  public enum Level {
+    USER(0), DEFAULT(1), EXTENDED(2);
+    private int level;
+    private Level(final int level) {
+      this.level = level;
+    }
+
+    public boolean ge(Level level) {
+      return this.level >= level.level;
+    }
+  };
   String displayName() default "";
 
-  boolean normalExplain() default true;
+  Level explainLevel() default Level.DEFAULT;
 
   boolean displayOnlyOnTrue() default false;
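The enum above is the heart of the patch: levels form a total order (USER < DEFAULT < EXTENDED), and `ge` tests whether the level requested by the user dominates the level declared on an annotated getter. `explainLevel = Level.EXTENDED` therefore reproduces the old `normalExplain = false` behavior while leaving room for the new USER level. Below is a minimal, self-contained sketch of how `ExplainTask`-style code consumes the annotation via reflection; `DemoDesc` and its getters are hypothetical, not Hive classes:

```java
import java.lang.reflect.Method;

import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

public class AnnotationWalkDemo {

  // A hypothetical stand-in for a plan descriptor such as FileSinkDesc.
  static class DemoDesc {
    @Explain(displayName = "alias")  // explainLevel defaults to Level.DEFAULT
    public String getAlias() { return "t1"; }

    @Explain(displayName = "GatherStats", explainLevel = Level.EXTENDED)
    public boolean isGatherStats() { return true; }
  }

  // Same gate as the patched ExplainTask.outputPlan(): emit an entry only if
  // the requested level is >= the level declared on the getter.
  static void dump(Object work, Level requested) throws Exception {
    for (Method m : work.getClass().getMethods()) {
      Explain note = m.getAnnotation(Explain.class);
      if (note != null && requested.ge(note.explainLevel())) {
        System.out.println(note.displayName() + ": " + m.invoke(work));
      }
    }
  }

  public static void main(String[] args) throws Exception {
    dump(new DemoDesc(), Level.DEFAULT);   // prints only "alias"
    dump(new DemoDesc(), Level.EXTENDED);  // prints "alias" and "GatherStats"
  }
}
```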
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
index f258d51..f05da81 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ExplainWork.
@@ -43,7 +44,7 @@
   private HashSet<ReadEntity> inputs;
   private ParseContext pCtx;
 
-  boolean extended;
+  Level level;
   boolean formatted;
   boolean dependency;
   boolean logical;
@@ -63,7 +64,7 @@ public ExplainWork(Path resFile,
       Task<? extends Serializable> fetchTask,
       String astStringTree,
       BaseSemanticAnalyzer analyzer,
-      boolean extended,
+      Level level,
       boolean formatted,
       boolean dependency,
       boolean logical,
@@ -74,7 +75,7 @@ public ExplainWork(Path resFile,
     this.astStringTree = astStringTree;
     this.analyzer = analyzer;
     this.inputs = analyzer.getInputs();
-    this.extended = extended;
+    this.level = level;
     this.formatted = formatted;
     this.dependency = dependency;
     this.logical = logical;
@@ -122,12 +123,12 @@ public void setInputs(HashSet<ReadEntity> inputs) {
     this.inputs = inputs;
   }
 
-  public boolean getExtended() {
-    return extended;
+  public Level getLevel() {
+    return level;
   }
 
-  public void setExtended(boolean extended) {
-    this.extended = extended;
+  public void setLevel(Level level) {
+    this.level = level;
   }
 
   public boolean getDependency() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
index ef5a655..ae7b7e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.parse.SplitSample;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
 /**
@@ -185,7 +186,7 @@ public void setPartDir(ArrayList<Path> partDir) {
    *
    * @return the partDesc array list
    */
-  @Explain(displayName = "Partition Description", normalExplain = false)
+  @Explain(displayName = "Partition Description", explainLevel = Level.EXTENDED)
   public ArrayList<PartitionDesc> getPartDescOrderedByPartDir() {
     ArrayList<PartitionDesc> partDescOrdered = partDesc;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
index 83ebfa3..ffecc5d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * FileSinkDesc.
@@ -151,7 +152,7 @@ public Object clone() throws CloneNotSupportedException {
     return (Object) ret;
   }
 
-  @Explain(displayName = "directory", normalExplain = false)
+  @Explain(displayName = "directory", explainLevel = Level.EXTENDED)
   public Path getDirName() {
     return dirName;
   }
@@ -182,7 +183,7 @@ public void setCompressed(boolean compressed) {
     this.compressed = compressed;
   }
 
-  @Explain(displayName = "GlobalTableId", normalExplain = false)
+  @Explain(displayName = "GlobalTableId", explainLevel = Level.EXTENDED)
   public int getDestTableId() {
     return destTableId;
   }
@@ -210,7 +211,7 @@ public void setCompressType(String intermediateCompressType) {
   /**
    * @return the multiFileSpray
    */
-  @Explain(displayName = "MultiFileSpray", normalExplain = false)
+  @Explain(displayName = "MultiFileSpray", explainLevel = Level.EXTENDED)
   public boolean isMultiFileSpray() {
     return multiFileSpray;
   }
@@ -248,7 +249,7 @@ public void setCanBeMerged(boolean canBeMerged) {
   /**
    * @return the totalFiles
    */
-  @Explain(displayName = "TotalFiles", normalExplain = false)
+  @Explain(displayName = "TotalFiles", explainLevel = Level.EXTENDED)
   public int getTotalFiles() {
     return totalFiles;
   }
@@ -277,7 +278,7 @@ public void setPartitionCols(ArrayList<ExprNodeDesc> partitionCols) {
   /**
    * @return the numFiles
    */
-  @Explain(displayName = "NumFilesPerFileSink", normalExplain = false)
+  @Explain(displayName = "NumFilesPerFileSink", explainLevel = Level.EXTENDED)
   public int getNumFiles() {
     return numFiles;
   }
@@ -301,7 +302,7 @@ public void setStaticSpec(String staticSpec) {
     this.staticSpec = staticSpec;
   }
 
-  @Explain(displayName = "Static Partition Specification", normalExplain = false)
+  @Explain(displayName = "Static Partition Specification", explainLevel = Level.EXTENDED)
   public String getStaticSpec() {
     return staticSpec;
   }
@@ -310,7 +311,7 @@ public void setGatherStats(boolean gatherStats) {
     this.gatherStats = gatherStats;
   }
 
-  @Explain(displayName = "GatherStats", normalExplain = false)
+  @Explain(displayName = "GatherStats", explainLevel = Level.EXTENDED)
   public boolean isGatherStats() {
     return gatherStats;
   }
@@ -326,7 +327,7 @@ public boolean isGatherStats() {
    * will be aggregated.
    * @return key prefix used for stats publishing and aggregation.
    */
-  @Explain(displayName = "Stats Publishing Key Prefix", normalExplain = false)
+  @Explain(displayName = "Stats Publishing Key Prefix", explainLevel = Level.EXTENDED)
   public String getStatsAggPrefix() {
     // dirName uniquely identifies destination directory of a FileSinkOperator.
     // If more than one FileSinkOperator write to the same partition, this dirName
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
index 22fd29e..4afd283 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
@@ -20,6 +20,8 @@
 
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * FilterDesc.
@@ -116,7 +118,7 @@ public void setPredicate(
     this.predicate = predicate;
   }
 
-  @Explain(displayName = "isSamplingPred", normalExplain = false)
+  @Explain(displayName = "isSamplingPred", explainLevel = Level.EXTENDED)
   public boolean getIsSamplingPred() {
     return isSamplingPred;
   }
@@ -133,7 +135,7 @@ public void setSampleDescr(final SampleDesc sampleDescr) {
     this.sampleDescr = sampleDescr;
   }
 
-  @Explain(displayName = "sampleDesc", normalExplain = false)
+  @Explain(displayName = "sampleDesc", explainLevel = Level.EXTENDED)
   public String getSampleDescExpr() {
     return sampleDescr == null ? null : sampleDescr.toString();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
index 03ef704..3f745be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * Map Join operator Descriptor implementation.
@@ -285,7 +286,7 @@ public void setFilterMap(int[][] filterMap) {
   }
 
   @Override
-  @Explain(displayName = "filter mappings", normalExplain = false)
+  @Explain(displayName = "filter mappings", explainLevel = Level.EXTENDED)
   public Map<Integer, String> getFilterMapString() {
     return toCompactString(filterMap);
   }
@@ -328,7 +329,7 @@ public void setKeys(Map<Byte, List<ExprNodeDesc>> keys) {
   /**
    * @return the position of the big table not in memory
   */
-  @Explain(displayName = "Position of Big Table", normalExplain = false)
+  @Explain(displayName = "Position of Big Table", explainLevel = Level.EXTENDED)
   public int getPosBigTable() {
     return posBigTable;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
index 990608a..181dc2f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.parse.QBJoinTree;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * Join operator Descriptor implementation.
@@ -453,7 +454,7 @@ public void setFilterMap(int[][] filterMap) {
     this.filterMap = filterMap;
   }
 
-  @Explain(displayName = "filter mappings", normalExplain = false)
+  @Explain(displayName = "filter mappings", explainLevel = Level.EXTENDED)
   public Map<Integer, String> getFilterMapString() {
     return toCompactString(filterMap);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
index 68e2afc..18bccd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * LoadDesc.
@@ -37,7 +38,7 @@ public LoadDesc(final Path sourcePath) {
     this.sourcePath = sourcePath;
   }
 
-  @Explain(displayName = "source", normalExplain = false)
+  @Explain(displayName = "source", explainLevel = Level.EXTENDED)
   public Path getSourcePath() {
     return sourcePath;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
index 9fdd417..906b05e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
@@ -28,6 +28,8 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * Map Join operator Descriptor implementation.
 *
@@ -136,7 +138,7 @@ public void setParentToInput(Map<Integer, String> parentToInput) {
     return parentKeyCounts;
   }
 
-  @Explain(displayName = "Estimated key counts", normalExplain = false)
+  @Explain(displayName = "Estimated key counts", explainLevel = Level.EXTENDED)
   public String getKeyCountsExplainDesc() {
     StringBuilder result = null;
     for (Map.Entry<Integer, Long> entry : parentKeyCounts.entrySet()) {
@@ -220,7 +222,7 @@ public void setKeys(Map<Byte, List<ExprNodeDesc>> keys) {
   /**
    * @return the position of the big table not in memory
   */
-  @Explain(displayName = "Position of Big Table", normalExplain = false)
+  @Explain(displayName = "Position of Big Table", explainLevel = Level.EXTENDED)
   public int getPosBigTable() {
     return posBigTable;
   }
@@ -310,7 +312,7 @@ public void setBigTablePartSpecToFileMapping(Map<String, List<String>> partToFil
     this.bigTablePartSpecToFileMapping = partToFileMapping;
   }
 
-  @Explain(displayName = "BucketMapJoin", normalExplain = false, displayOnlyOnTrue = true)
+  @Explain(displayName = "BucketMapJoin", explainLevel = Level.EXTENDED, displayOnlyOnTrue = true)
   public boolean isBucketMapJoin() {
     return isBucketMapJoin;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index f6616fb..bd3e377 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
 import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol;
 import org.apache.hadoop.hive.ql.parse.SplitSample;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.mapred.JobConf;
 
 import com.google.common.collect.Interner;
@@ -134,7 +135,7 @@ public MapWork(String name) {
     super(name);
   }
 
-  @Explain(displayName = "Path -> Alias", normalExplain = false)
+  @Explain(displayName = "Path -> Alias", explainLevel = Level.EXTENDED)
   public LinkedHashMap<String, ArrayList<String>> getPathToAliases() {
     return pathToAliases;
   }
@@ -155,7 +156,7 @@ public void setPathToAliases(
    *
    * @return
    */
-  @Explain(displayName = "Truncated Path -> Alias", normalExplain = false)
+  @Explain(displayName = "Truncated Path -> Alias", explainLevel = Level.EXTENDED)
   public Map<String, ArrayList<String>> getTruncatedPathToAliases() {
     Map<String, ArrayList<String>> trunPathToAliases = new LinkedHashMap<String, ArrayList<String>>();
@@ -170,7 +171,7 @@ public void setPathToAliases(
     return trunPathToAliases;
   }
 
-  @Explain(displayName = "Path -> Partition", normalExplain = false)
+  @Explain(displayName = "Path -> Partition", explainLevel = Level.EXTENDED)
   public LinkedHashMap<String, PartitionDesc> getPathToPartitionInfo() {
     return pathToPartitionInfo;
   }
@@ -240,7 +241,7 @@ public void setAliasToWork(
     this.aliasToWork = aliasToWork;
   }
 
-  @Explain(displayName = "Split Sample", normalExplain = false)
+  @Explain(displayName = "Split Sample", explainLevel = Level.EXTENDED)
   public HashMap<String, SplitSample> getNameToSplitSample() {
     return nameToSplitSample;
   }
@@ -467,12 +468,12 @@ public void mergingInto(MapWork mapWork) {
     mapWork.useBucketizedHiveInputFormat |= useBucketizedHiveInputFormat;
   }
 
-  @Explain(displayName = "Path -> Bucketed Columns", normalExplain = false)
+  @Explain(displayName = "Path -> Bucketed Columns", explainLevel = Level.EXTENDED)
   public Map<String, List<BucketCol>> getBucketedColsByDirectory() {
     return bucketedColsByDirectory;
   }
 
-  @Explain(displayName = "Path -> Sorted Columns", normalExplain = false)
+  @Explain(displayName = "Path -> Sorted Columns", explainLevel = Level.EXTENDED)
   public Map<String, List<SortCol>> getSortedColsByDirectory() {
     return sortedColsByDirectory;
   }
@@ -493,7 +494,7 @@ public void setSamplingType(int samplingType) {
     this.samplingType = samplingType;
   }
 
-  @Explain(displayName = "Sampling", normalExplain = false)
+  @Explain(displayName = "Sampling", explainLevel = Level.EXTENDED)
   public String getSamplingTypeString() {
     return samplingType == 1 ? "SAMPLING_ON_PREV_MR" :
         samplingType == 2 ? "SAMPLING_ON_START" : null;
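`explainLevel` composes with the pre-existing `displayOnlyOnTrue` flag used on `isBucketMapJoin()` above: the level gate and the value gate must both pass before anything is printed. The following is an illustrative, self-contained sketch of the combined check; the `shouldDisplay` helper is hypothetical (in Hive the logic is split between `ExplainTask.shouldPrint` and the level test shown earlier):

```java
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

public class DisplayOnlyOnTrueDemo {

  @Explain(displayName = "BucketMapJoin", explainLevel = Level.EXTENDED, displayOnlyOnTrue = true)
  public boolean isBucketMapJoin() { return false; }

  // Hypothetical combination of the two gates: the requested level must
  // dominate the declared one, and displayOnlyOnTrue suppresses non-true values.
  static boolean shouldDisplay(Explain note, Object val, Level requested) {
    return requested.ge(note.explainLevel())
        && (!note.displayOnlyOnTrue() || Boolean.TRUE.equals(val));
  }

  public static void main(String[] args) throws Exception {
    DisplayOnlyOnTrueDemo d = new DisplayOnlyOnTrueDemo();
    Explain note = d.getClass().getMethod("isBucketMapJoin").getAnnotation(Explain.class);
    System.out.println(shouldDisplay(note, d.isBucketMapJoin(), Level.EXTENDED)); // false: value is false
    System.out.println(shouldDisplay(note, true, Level.DEFAULT));                 // false: below EXTENDED
    System.out.println(shouldDisplay(note, true, Level.EXTENDED));                // true
  }
}
```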
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
index 316d306..95fbac4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * MapredLocalWork.
@@ -138,7 +139,7 @@ public void deriveExplainAttributes() {
     }
   }
 
-  @Explain(displayName = "Bucket Mapjoin Context", normalExplain = false)
+  @Explain(displayName = "Bucket Mapjoin Context", explainLevel = Level.EXTENDED)
   public BucketMapJoinContext getBucketMapjoinContextExplain() {
     return bucketMapjoinContext != null && bucketMapjoinContext.getBucketFileNameMapping() != null ?
         bucketMapjoinContext : null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
index 503117d..0b98228 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -176,7 +177,7 @@ public Properties getProperties() {
     return properties;
   }
 
-  @Explain(displayName = "properties", normalExplain = false)
+  @Explain(displayName = "properties", explainLevel = Level.EXTENDED)
   public Map getPropertiesExplain() {
     return HiveStringUtils.getPropertiesExplain(getProperties());
   }
@@ -216,7 +217,7 @@ public String getOutputFileFormatClassName() {
     return getOutputFileFormatClass().getName();
   }
 
-  @Explain(displayName = "base file name", normalExplain = false)
+  @Explain(displayName = "base file name", explainLevel = Level.EXTENDED)
   public String getBaseFileName() {
     return baseFileName;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
index 818a8e3..c35e62c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 @Explain(displayName = "Principal")
 public class PrincipalDesc implements Serializable, Cloneable {
@@ -50,7 +51,7 @@ public void setName(String name) {
     this.name = name;
   }
 
-  @Explain(displayName="type", normalExplain = false)
+  @Explain(displayName="type", explainLevel = Level.EXTENDED)
   public PrincipalType getType() {
     return type;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
index 28cb3ba..1148faa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
@@ -25,6 +25,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
 *
@@ -245,7 +246,7 @@ public boolean isPartitioning() {
     return false;
   }
 
-  @Explain(displayName = "tag", normalExplain = false)
+  @Explain(displayName = "tag", explainLevel = Level.EXTENDED)
   public int getTag() {
     return tag;
   }
@@ -262,7 +263,7 @@ public void setTopN(int topN) {
     this.topN = topN;
   }
 
-  @Explain(displayName = "TopN", normalExplain = false)
+  @Explain(displayName = "TopN", explainLevel = Level.EXTENDED)
   public Integer getTopNExplain() {
     return topN > 0 ? topN : null;
   }
@@ -393,7 +394,7 @@ public boolean getSkipTag() {
     return skipTag;
   }
 
-  @Explain(displayName = "auto parallelism", normalExplain = false)
+  @Explain(displayName = "auto parallelism", explainLevel = Level.EXTENDED)
   public final boolean isAutoParallel() {
     return (this.reduceTraits.contains(ReducerTraits.AUTOPARALLEL));
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
index c78184b..2e50c0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -162,7 +163,7 @@ public void setReducer(final Operator<?> reducer) {
     this.reducer = reducer;
   }
 
-  @Explain(displayName = "Needs Tagging", normalExplain = false)
+  @Explain(displayName = "Needs Tagging", explainLevel = Level.EXTENDED)
   public boolean getNeedsTagging() {
     return needsTagging;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
index 28d16a3..03cbace 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
@@ -20,6 +20,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 public class ShowColumnsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -80,7 +81,7 @@ public void setTableName(String tableName) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
index df385a2..8769ec1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 import java.io.Serializable;
 
@@ -41,7 +42,7 @@ public ShowConfDesc(Path resFile, String confName) {
     this.confName = confName;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public Path getResFile() {
     return resFile;
   }
@@ -50,7 +51,7 @@ public void setResFile(Path resFile) {
     this.resFile = resFile;
   }
 
-  @Explain(displayName = "conf name", normalExplain = false)
+  @Explain(displayName = "conf name", explainLevel = Level.EXTENDED)
   public String getConfName() {
     return confName;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
index 71520e8..e588adf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
@@ -20,6 +20,8 @@
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * ShowCreateTableDesc.
 *
@@ -66,7 +68,7 @@ public ShowCreateTableDesc(String tableName, String resFile) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
index 0ad0658..40bc585 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowDatabasesDesc.
@@ -89,7 +90,7 @@ public void setPattern(String pattern) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
index 5d4a821..34214ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowFunctionsDesc.
@@ -103,7 +104,7 @@ public void setPattern(String pattern) {
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
index 1902d36..d096ef6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
@@ -23,6 +23,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowLocksDesc.
@@ -137,7 +138,7 @@ public void setPartSpecs(HashMap<String, String> partSpec) {
   /**
    * @return the resFile
   */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
index 4059b92..5116cdd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
@@ -22,6 +22,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowPartitionsDesc.
@@ -102,7 +103,7 @@ public void setPartSpec(Map<String, String> partSpec) {
   /**
    * @return the results file
   */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
index 15613ed..8a04d25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowTableStatusDesc.
@@ -108,7 +109,7 @@ public String getResFile() {
     return resFile;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFileString() {
     return getResFile();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
index 850e964..9e2f9f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowTablesDesc.
@@ -98,7 +99,7 @@ public void setPattern(String pattern) {
   /**
    * @return the resFile
   */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFile() {
     return resFile;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
index 13de46e..691546e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ShowTblPropertiesDesc.
@@ -77,7 +78,7 @@ public String getResFile() {
     return resFile;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevel = Level.EXTENDED)
   public String getResFileString() {
     return getResFile();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
index 66d4d4a..e018833 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
@@ -22,6 +22,7 @@
 
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * ConditionalStats.
@@ -90,7 +91,7 @@ public void setAggKey(String aggK) {
     aggKey = aggK;
   }
 
-  @Explain(displayName = "Stats Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Stats Aggregation Key Prefix", explainLevel = Level.EXTENDED)
   public String getAggKey() {
     return aggKey;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index 0e34aee..387bae7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -117,7 +118,7 @@ public Properties getProperties() {
     return properties;
   }
 
-  @Explain(displayName = "properties", normalExplain = false)
+  @Explain(displayName = "properties", explainLevel = Level.EXTENDED)
   public Map getPropertiesExplain() {
     return HiveStringUtils.getPropertiesExplain(getProperties());
   }
@@ -130,7 +131,7 @@ public void setJobProperties(Map<String, String> jobProperties) {
     this.jobProperties = jobProperties;
   }
 
-  @Explain(displayName = "jobProperties", normalExplain = false)
+  @Explain(displayName = "jobProperties", explainLevel = Level.EXTENDED)
   public Map<String, String> getJobProperties() {
     return jobProperties;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index 0e85990..19aca60 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * Table Scan Descriptor Currently, data is only read from a base source as part
@@ -190,7 +191,7 @@ public void setGatherStats(boolean gatherStats) {
     this.gatherStats = gatherStats;
   }
 
-  @Explain(displayName = "GatherStats", normalExplain = false)
+  @Explain(displayName = "GatherStats", explainLevel = Level.EXTENDED)
   public boolean isGatherStats() {
     return gatherStats;
   }
@@ -215,7 +216,7 @@ public void setStatsAggPrefix(String k) {
     statsAggKeyPrefix = k;
   }
 
-  @Explain(displayName = "Statistics Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Statistics Aggregation Key Prefix", explainLevel = Level.EXTENDED)
   public String getStatsAggPrefix() {
     return statsAggKeyPrefix;
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index 7138d51..fbee2ae 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -26,6 +26,7 @@
 import java.util.Map;
 
 import junit.framework.Assert;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -41,6 +42,7 @@
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
@@ -297,7 +299,7 @@ private String explain(SemanticAnalyzer sem, QueryPlan plan, String astStringTre
     fs.create(tmp);
     fs.deleteOnExit(tmp);
     ExplainWork work = new ExplainWork(tmp, sem.getParseContext(), sem.getRootTasks(),
-        sem.getFetchTask(), astStringTree, sem, true, false, false, false, false);
+        sem.getFetchTask(), astStringTree, sem, Level.EXTENDED, false, false, false, false);
     ExplainTask task = new ExplainTask();
     task.setWork(work);
     task.initialize(conf, plan, null);
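For callers, the migration is a one-argument swap: wherever `true`/`false` was passed for `isExtended`, pass `Level.EXTENDED`/`Level.DEFAULT`, exactly as the test change above does. A sketch of a caller after this patch, mirroring `Driver.getExplainOutput()`; the generic bounds on the task parameters follow the signature as reconstructed above and should be treated as an assumption:

```java
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

public class ExplainCallerSketch {
  // Mirrors Driver.getExplainOutput() after the patch: the old trailing
  // arguments (..., false, true, true) become (..., false, Level.EXTENDED, true).
  static String explainExtended(ExplainTask task, String astStringTree,
      List<Task<? extends Serializable>> rootTasks,
      Task<? extends Serializable> fetchTask) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(baos);
    task.getJSONPlan(ps, astStringTree, rootTasks, fetchTask, false, Level.EXTENDED, true);
    return baos.toString();
  }
}
```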