diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 403d57c..6bf0156 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -375,8 +375,7 @@ private static String indentString(int indent) {
   private JSONObject outputMap(Map mp, boolean hasHeader, PrintStream out,
       boolean extended, boolean jsonOutput, int indent) throws Exception {
 
-    TreeMap tree = new TreeMap();
-    tree.putAll(mp);
+    TreeMap tree = getBasictypeKeyedMap(mp);
     JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null;
     if (out != null && hasHeader && !mp.isEmpty()) {
       out.println();
@@ -496,6 +495,34 @@ else if (ent.getValue() != null) {
     return jsonOutput ? json : null;
   }
 
+  /**
+   * Returns a map which has either primitive-wrapper or string keys.
+   *
+   * This is necessary to discard object-level comparators which may sort the objects based on some non-trivial logic.
+   *
+   * @param mp the map whose keys should be normalized
+   * @return a TreeMap keyed by primitive wrappers or strings
+   */
+  private TreeMap getBasictypeKeyedMap(Map mp) {
+    TreeMap ret = new TreeMap();
+    if (mp.size() > 0) {
+      Object firstKey = mp.keySet().iterator().next();
+      // map keys are always boxed, so Class.isPrimitive() can never match; check the wrapper types instead
+      if (firstKey instanceof String || firstKey instanceof Number
+          || firstKey instanceof Boolean || firstKey instanceof Character) {
+        // keep it as-is
+        ret.putAll(mp);
+        return ret;
+      } else {
+        for (Entry<?, ?> entry : (Set<Entry<?, ?>>) mp.entrySet()) {
+          // discard a possibly type-related sorting order and replace it with alphabetical order
+          ret.put(entry.getKey().toString(), entry.getValue());
+        }
+      }
+    }
+    return ret;
+  }
+
   private JSONArray outputList(List l, PrintStream out, boolean hasHeader,
       boolean extended, boolean jsonOutput, int indent) throws Exception {
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index 156e3bb..5cc3663 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -153,20 +153,7 @@ public MapWork(String name) {
     super(name);
   }
 
-
-  // HIVE-12244: this @Explain should be on the new method; but it changes the explain result
-  // HIVE-12244: example test which can be used to validate change: -Dtest=TestMiniLlapCliDriver -Dqfile=dynamic_partition_pruning.q
   @Explain(displayName = "Path -> Alias", explainLevels = { Level.EXTENDED })
-  @Deprecated
-  public LinkedHashMap<String, ArrayList<String>> getPathToAliasesOld() {
-    LinkedHashMap<String, ArrayList<String>> ret = new LinkedHashMap<>();
-    for (Entry<Path, ArrayList<String>> p2a : pathToAliases.entrySet()) {
-      ret.put(p2a.getKey().toString(), p2a.getValue());
-    }
-    return ret;
-  }
-
-  // @Explain(displayName = "Path -> Alias", explainLevels = { Level.EXTENDED })
   public LinkedHashMap<Path, ArrayList<String>> getPathToAliases() {
     // return pathToAliases;
     return pathToAliases;
@@ -220,19 +207,7 @@ public void removePathToAlias(Path path){
     return trunPathToAliases;
   }
 
-  // HIVE-12244: this @Explain should be on the new method; but it changes the explain result
-  // HIVE-12244: example test which can be used to validate change: combine2.q
   @Explain(displayName = "Path -> Partition", explainLevels = { Level.EXTENDED })
-  @Deprecated
-  public LinkedHashMap<String, PartitionDesc> getPathToPartitionInfoOld() {
-    LinkedHashMap<String, PartitionDesc> ret = new LinkedHashMap<>();
-    for (Entry<Path, PartitionDesc> p2a : pathToPartitionInfo.entrySet()) {
-      ret.put(p2a.getKey().toString(), p2a.getValue());
-    }
-    return ret;
-  }
-
-  //@Explain(displayName = "Path -> Partition", explainLevels = { Level.EXTENDED })
   public LinkedHashMap<Path, PartitionDesc> getPathToPartitionInfo() {
     return pathToPartitionInfo;
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
new file mode 100644
index 0000000..1da32fc
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.commons.io.output.ByteArrayOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestExplainTask {
+
+  public static class DummyExplainDesc<K, V> extends TableScanDesc {
+    private static final long serialVersionUID = 1L;
+    private Map<K, V> explainResult;
+
+    public DummyExplainDesc(Map<K, V> explainResult) {
+      this.explainResult = explainResult;
+    }
+
+    @Explain(displayName = "test", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+    public Map<K, V> explainMethod() {
+      return explainResult;
+    }
+  }
+
+  public static class DummyOperator extends TableScanOperator {
+    private static final long serialVersionUID = 1L;
+
+    public DummyOperator(TableScanDesc conf) {
+      super();
+      setConf(conf);
+    }
+
+  }
+
+  @Test
+  public void testExplainDoesSortTopLevelMapEntries() throws Exception {
+    LinkedHashMap<String, String> explainMap1 = new LinkedHashMap<>();
+    explainMap1.put("/k1", "v");
+    explainMap1.put("k3", "v");
+    explainMap1.put("hdfs:///k2", "v");
+    explainMap1.put("hdfs:///k1", "v");
+
+    LinkedHashMap<String, String> explainMap2 = new LinkedHashMap<>();
+    explainMap2.put("hdfs:///k1", "v");
+    explainMap2.put("hdfs:///k2", "v");
+    explainMap2.put("/k1", "v");
+    explainMap2.put("k3", "v");
+
+    String result1 = explainToString(explainMap1);
+    String result2 = explainToString(explainMap2);
+
+    assertEquals("both maps should be sorted the same way, regardless of input order", result1, result2);
+  }
+
+  @Test
+  public void testExplainDoesSortPathAsStrings() throws Exception {
+    LinkedHashMap<String, String> explainMap1 = new LinkedHashMap<>();
+    explainMap1.put("/k1", "v");
+    explainMap1.put("k3", "v");
+    explainMap1.put("hdfs:/k2", "v");
+    explainMap1.put("hdfs:/k1", "v");
+
+    LinkedHashMap<Path, String> explainMap2 = new LinkedHashMap<>();
+    explainMap2.put(new Path("hdfs:/k1"), "v");
+    explainMap2.put(new Path("hdfs:/k2"), "v");
+    explainMap2.put(new Path("/k1"), "v");
+    explainMap2.put(new Path("k3"), "v");
+
+    String result1 = explainToString(explainMap1);
+    String result2 = explainToString(explainMap2);
+
+    assertEquals("both maps should be sorted the same way", result1, result2);
+  }
+
+  @Ignore("HIVE-14287 will fix this later")
+  @Test
+  public void testExplainDoesSortMapValues() throws Exception {
+    LinkedHashMap<String, String> explainMap1Val = new LinkedHashMap<>();
+    explainMap1Val.put("a", "v");
+    explainMap1Val.put("b", "v");
+
+    LinkedHashMap<String, Map<String, String>> explainMap1 = new LinkedHashMap<>();
+    explainMap1.put("k", explainMap1Val);
+
+    LinkedHashMap<String, String> explainMap2Val = new LinkedHashMap<>();
+    explainMap2Val.put("b", "v");
+    explainMap2Val.put("a", "v");
+
+    LinkedHashMap<String, Map<String, String>> explainMap2 = new LinkedHashMap<>();
+    explainMap2.put("k", explainMap2Val);
+
+    String result1 = explainToString(explainMap1);
+    String result2 = explainToString(explainMap2);
+
+    assertEquals("both maps should be sorted the same way", result1, result2);
+  }
+
+  private String explainToString(Map<?, ?> explainMap) throws Exception {
+    ExplainWork work = new ExplainWork();
+    ParseContext pCtx = new ParseContext();
+    HashMap<String, TableScanOperator> topOps = new HashMap<>();
+    TableScanOperator scanOp = new DummyOperator(new DummyExplainDesc<>(explainMap));
+    topOps.put("sample", scanOp);
+    pCtx.setTopOps(topOps);
+    work.setParseContext(pCtx);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    new ExplainTask().getJSONLogicalPlan(new PrintStream(baos), work);
+    baos.close();
+    return baos.toString();
+  }
+
+}
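
Side note, not part of the patch: a minimal standalone sketch of the ordering problem that getBasictypeKeyedMap addresses. A TreeMap delegates ordering to its keys' compareTo, so a map keyed by org.apache.hadoop.fs.Path objects may iterate in a different order than one keyed by the equivalent strings (and mixed key types would throw ClassCastException). Normalizing every key with toString() before sorting, as the patched ExplainTask does, yields one stable alphabetical order regardless of the key type. The class name KeyOrderSketch and the normalize helper are hypothetical illustrations; only hadoop-common is assumed on the classpath.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.fs.Path;

public class KeyOrderSketch {
  public static void main(String[] args) {
    // same logical content, one map keyed by Path objects, one by plain strings
    Map<Object, Object> pathKeyed = new LinkedHashMap<>();
    pathKeyed.put(new Path("hdfs:/k2"), "v");
    pathKeyed.put(new Path("hdfs:/k1"), "v");
    pathKeyed.put(new Path("/k1"), "v");

    Map<Object, Object> stringKeyed = new LinkedHashMap<>();
    stringKeyed.put("hdfs:/k1", "v");
    stringKeyed.put("/k1", "v");
    stringKeyed.put("hdfs:/k2", "v");

    // converting keys to strings before sorting gives both maps the same order
    System.out.println(normalize(pathKeyed).keySet());   // [/k1, hdfs:/k1, hdfs:/k2]
    System.out.println(normalize(stringKeyed).keySet()); // [/k1, hdfs:/k1, hdfs:/k2]
  }

  // the core of getBasictypeKeyedMap: key the TreeMap by each key's string form
  private static TreeMap<String, Object> normalize(Map<?, ?> mp) {
    TreeMap<String, Object> ret = new TreeMap<>();
    for (Map.Entry<?, ?> entry : mp.entrySet()) {
      ret.put(entry.getKey().toString(), entry.getValue());
    }
    return ret;
  }
}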