commit 88e87c18c0bc5b6eabf178072d1915a886348606
Author: Navis Ryu
Date:   Fri Jul 15 10:18:10 2011 +0900

    HIVE-2283 initial patch

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnTracker.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnTracker.java
new file mode 100644
index 0000000..c71d5f3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnTracker.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+
+public class ColumnTracker {
+
+  public static final int NONE = 0;
+  public static final int NORMAL = 1;
+  public static final int EXPLICIT = 2;
+
+  public static String resolveString(Operator<? extends Serializable> op, String input) {
+    return toString(resolve(op, new String[]{null, input}));
+  }
+
+  public static String resolveString(Operator<? extends Serializable> op, ExprNodeDesc expr) {
+    return toString(resolve(op, expr));
+  }
+
+  public static String[] resolve(Operator<? extends Serializable> op, String input) {
+    return resolve(op, new String[]{null, input});
+  }
+
+  public static String[] resolve(Operator<? extends Serializable> op, ExprNodeDesc expr) {
+    String[] resolved = resolveExpr(op, expr);
+    return forward(op, resolved);
+  }
+
+  public static String[] resolve(Operator<? extends Serializable> op, String[] input) {
+    if (op instanceof SelectOperator && ((SelectOperator) op).getConf().isSelectStar()) {
+      // I think this is a bug from GroupBy handling
+      return forward(op, input);
+    }
+    Map<String, ExprNodeDesc> colMapper = op.getColumnExprMap();
+    if (colMapper == null) {
+      return forward(op, input);
+    }
+    ExprNodeDesc expr = colMapper.get(input[1]);
+    if (expr != null && validate(op, expr, input)) {
+      return resolve(op, expr);
+    }
+    return input;
+  }
+
+  private static boolean validate(Operator<? extends Serializable> op, ExprNodeDesc expr, String[] input) {
+    if (input[0] != null && !input[0].isEmpty() && expr instanceof ExprNodeColumnDesc) {
+      ExprNodeColumnDesc column = (ExprNodeColumnDesc) expr;
+      if (column.getTabAlias() != null && !input[0].equals(column.getTabAlias())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  private static String[] resolveExpr(Operator<? extends Serializable> op, ExprNodeDesc expr) {
+    if (expr instanceof ExprNodeColumnDesc) {
+      ExprNodeColumnDesc column = (ExprNodeColumnDesc) expr;
+      return new String[]{column.getTabAlias(), column.getColumn()};
+    }
+    if (expr instanceof ExprNodeGenericFuncDesc) {
+      ExprNodeGenericFuncDesc func = (ExprNodeGenericFuncDesc) expr;
+
+      List<String> args = new ArrayList<String>();
+      for (ExprNodeDesc child : func.getChildExprs()) {
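+        // resolve each argument recursively, so a column buried inside a
+        // UDF call is traced back to its originating table alias as well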
+        args.add(toString(resolve(op, child)));
+      }
+      String[] array = args.toArray(new String[args.size()]);
+      return new String[]{null, func.getGenericUDF().getDisplayString(array)};
+    }
+    return new String[]{null, expr.getExprString()};
+  }
+
+  public static String toString(String[] column) {
+    return (column[0] == null ? "" : column[0] + ".") + column[1];
+  }
+
+  private static String[] forward(Operator<? extends Serializable> op, String[] input) {
+    if (op instanceof TableScanOperator && ((TableScanOperator) op).getSplitParent() != null) {
+      return resolve(((TableScanOperator) op).getSplitParent(), input);
+    }
+    List<Operator<? extends Serializable>> parents = parentsFor(op);
+    if (parents != null && !parents.isEmpty()) {
+      String[] current = null;
+      for (Operator<? extends Serializable> parent : parents) {
+        if (parent != null) {
+          String[] resolved = resolve(parent, input);
+          if (!Arrays.equals(resolved, input)) {
+            if (current != null && !Arrays.equals(resolved, current)) {
+              throw new IllegalStateException("ambiguous column " + toString(input)
+                  + " from parent operators " + op.getParentOperators());
+            }
+            current = resolved;
+          }
+        }
+      }
+      if (current != null) {
+        return current;
+      }
+    }
+    return input;
+  }
+
+  private static List<Operator<? extends Serializable>> parentsFor(Operator<? extends Serializable> op) {
+    Operator<? extends Serializable> peer = peerFor(op);
+    if (peer == null || peer.getParentOperators() == null || op.getParentOperators() == null) {
+      return op.getParentOperators();
+    }
+    List<Operator<? extends Serializable>> parents = new ArrayList<Operator<? extends Serializable>>();
+    parents.addAll(op.getParentOperators());
+    parents.addAll(peer.getParentOperators());
+    return parents;
+  }
+
+  private static Operator<? extends Serializable> peerFor(Operator<? extends Serializable> op) {
+    if (op instanceof MapJoinOperator) {
+      return ((MapJoinOperator) op).getPeer();
+    }
+    if (op instanceof HashTableSinkOperator) {
+      return ((HashTableSinkOperator) op).getPeer();
+    }
+    return null;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index fe95cb1..7208daa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -23,13 +23,7 @@ import java.io.PrintStream;
 import java.io.Serializable;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import java.util.*;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.Path;
@@ -37,6 +31,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -94,7 +89,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
   }
 
   private void outputMap(Map<?, ?> mp, String header, PrintStream out,
-      boolean extended, int indent) throws Exception {
+      boolean extended, Operator<? extends Serializable> current, int indent) throws Exception {
 
     boolean first_el = true;
     TreeMap<Object, Object> tree = new TreeMap<Object, Object>();
@@ -118,7 +113,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
         out.println();
       } else if (ent.getValue() instanceof Serializable) {
         out.println();
-        outputPlan((Serializable) ent.getValue(), out, extended, indent + 2);
+        outputPlan((Serializable) ent.getValue(), out, extended, current, indent + 2);
       } else {
         out.println();
       }
@@ -126,7 +121,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
   }
 
   private void outputList(List<?> l, String header, PrintStream out,
-      boolean extended, int indent) throws Exception {
+      boolean extended, Operator<? extends Serializable> current, int indent) throws Exception {
 
     boolean first_el = true;
     boolean nl = false;
@@ -148,7 +143,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
       if (first_el) {
         out.println();
       }
-      outputPlan((Serializable) o, out, extended, indent + 2);
+      outputPlan((Serializable) o, out, extended, current, indent + 2);
     }
 
     first_el = false;
@@ -174,7 +169,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
   }
 
   private void outputPlan(Serializable work, PrintStream out, boolean extended,
-      int indent) throws Exception {
+      Operator<? extends Serializable> current, int indent) throws Exception {
     // Check if work has an explain annotation
     Annotation note = work.getClass().getAnnotation(Explain.class);
 
@@ -192,11 +187,11 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     if (work instanceof Operator) {
       Operator<? extends Serializable> operator = (Operator<? extends Serializable>) work;
       if (operator.getConf() != null) {
-        outputPlan(operator.getConf(), out, extended, indent);
+        outputPlan(operator.getConf(), out, extended, operator, indent);
       }
       if (operator.getChildOperators() != null) {
         for (Operator<? extends Serializable> op : operator.getChildOperators()) {
-          outputPlan(op, out, extended, indent + 2);
+          outputPlan(op, out, extended, op, indent + 2);
         }
       }
       return;
@@ -214,12 +209,21 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
         Explain xpl_note = (Explain) note;
 
         if (extended || xpl_note.normalExplain()) {
+          int columnTrack = extended ? xpl_note.columnTrack() : ColumnTracker.NONE;
 
-          Object val = m.invoke(work);
-
+          Object val;
+          if (columnTrack == ColumnTracker.EXPLICIT) {
+            m = work.getClass().getMethod(m.getName(), Operator.class);
+            val = m.invoke(work, current);
+          } else {
+            val = m.invoke(work);
+          }
           if (val == null) {
             continue;
           }
+          if (columnTrack == ColumnTracker.NORMAL) {
+            val = trackColumns(current, val);
+          }
 
           String header = null;
           if (!xpl_note.displayName().equals("")) {
@@ -239,7 +243,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
           try {
             // Go through the map and print out the stuff
             Map<?, ?> mp = (Map<?, ?>) val;
-            outputMap(mp, header, out, extended, prop_indents + 2);
+            outputMap(mp, header, out, extended, current, prop_indents + 2);
             continue;
           } catch (ClassCastException ce) {
             // Ignore - all this means is that this is not a map
@@ -248,7 +252,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
           // Try this as a list
           try {
             List<?> l = (List<?>) val;
-            outputList(l, header, out, extended, prop_indents + 2);
+            outputList(l, header, out, extended, current, prop_indents + 2);
             continue;
 
           } catch (ClassCastException ce) {
@@ -259,7 +263,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
           try {
             Serializable s = (Serializable) val;
             out.println(header);
-            outputPlan(s, out, extended, prop_indents + 2);
+            outputPlan(s, out, extended, current, prop_indents + 2);
             continue;
 
           } catch (ClassCastException ce) {
@@ -270,6 +274,36 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     }
   }
 
+  private Object trackColumns(Operator<? extends Serializable> current, Object val) {
+    if (val instanceof List) {
+      List<String> details = new ArrayList<String>();
+      for (Object column : (List<?>) val) {
+        details.add(trackColumn(current, column));
+      }
+      return details;
+    }
+    if (val instanceof Map) {
+      Map<Object, Object> result = new HashMap<Object, Object>();
+      for (Map.Entry<?, ?> entry : ((Map<?, ?>) val).entrySet()) {
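+        // a value may itself be a list or a map of columns, so recurse
+        // rather than resolving the container object directly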
+        result.put(entry.getKey(), trackColumns(current, entry.getValue()));
+      }
+      return result;
+    }
+    return trackColumn(current, val);
+  }
+
+  private String trackColumn(Operator<? extends Serializable> current, Object val) {
+    if (val instanceof String) {
+      String column = (String) val;
+      String resolved = ColumnTracker.resolveString(current, column);
+      return column.equals(resolved) ? column : column + "=" + resolved;
+    } else if (val instanceof ExprNodeDesc) {
+      ExprNodeDesc expr = (ExprNodeDesc) val;
+      return expr.getExprString() + "=" + ColumnTracker.resolveString(current, expr);
+    }
+    return String.valueOf(val);
+  }
+
   private void outputPlan(Task<? extends Serializable> task, PrintStream out,
       boolean extended, HashSet<Task<? extends Serializable>> displayedSet, int indent)
       throws Exception {
@@ -283,7 +317,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     out.printf("Stage: %s\n", task.getId());
     // Start by getting the work part of the task and call the output plan for
     // the work
-    outputPlan(task.getWork(), out, extended, indent + 2);
+    outputPlan(task.getWork(), out, extended, null, indent + 2);
     out.println();
 
     if (task instanceof ConditionalTask && ((ConditionalTask) task).getListTasks() != null) {
@@ -311,7 +345,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     dependeciesTaskSet.add(task);
     boolean first = true;
     out.print(indentString(indent));
-    out.printf("%s", task.getId());
+    out.printf("%s %s", task.getId(), task.getName());
     if ((task.getParentTasks() == null || task.getParentTasks().isEmpty())) {
       if (rootTskCandidate) {
         out.print(" is a root stage");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
index 9cffc10..9eea29f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
@@ -118,6 +118,8 @@ public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> implements Serializable {
   private long hashTableScale;
   private boolean isAbort = false;
 
+  private transient Operator<? extends Serializable> peer;
+
   public static class HashTableSinkObjectCtx {
     ObjectInspector standardOI;
     SerDe serde;
@@ -443,4 +445,12 @@ public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> implements Serializable {
   public OperatorType getType() {
     return OperatorType.HASHTABLESINK;
   }
+
+  public Operator<? extends Serializable> getPeer() {
+    return peer;
+  }
+
+  public void setPeer(Operator<? extends Serializable> peer) {
+    this.peer = peer;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index c7784c9..52284a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -67,6 +67,8 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implements Serializable {
   transient boolean hashTblInitedOnce;
   private int bigTableAlias;
 
+  private transient Operator<? extends Serializable> peer;
+
   public MapJoinOperator() {
   }
 
@@ -153,7 +155,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implements Serializable {
         hashTblInitedOnce = true;
       }
     }
-    
+
     boolean localMode = HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPJT).equals("local");
     String baseDir = null;
 
@@ -301,7 +303,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implements Serializable {
 
   public void closeOp(boolean abort) throws HiveException {
     if (mapJoinTables != null) {
-      for (HashMapWrapper<MapJoinObjectKey, MapJoinObjectValue> hashTable : mapJoinTables.values()) { 
+      for (HashMapWrapper<MapJoinObjectKey, MapJoinObjectValue> hashTable : mapJoinTables.values()) {
        hashTable.close();
      }
    }
@@ -322,4 +324,12 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implements Serializable {
   public OperatorType getType() {
     return OperatorType.MAPJOIN;
   }
+
+  public Operator<? extends Serializable> getPeer() {
+    return peer;
+  }
+
+  public void setPeer(Operator<? extends Serializable> peer) {
+    this.peer = peer;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
index 141df13..8f91658 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
@@ -60,6 +60,8 @@ public class TableScanOperator extends Operator<TableScanDesc> implements Serializable {
   private transient Stat currentStat;
   private transient Map<String, Stat> stats;
 
+  private Operator<? extends Serializable> splitParent;
+
   public TableDesc getTableDesc() {
     return tableDesc;
   }
@@ -262,4 +264,12 @@ public class TableScanOperator extends Operator<TableScanDesc> implements Serializable {
     }
     statsPublisher.closeConnection();
   }
+
+  public Operator<? extends Serializable> getSplitParent() {
+    return splitParent;
+  }
+
+  public void setSplitParent(Operator<? extends Serializable> parent) {
+    this.splitParent = parent;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index ae33079..0fd04fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -952,11 +952,12 @@ public final class GenMapRedUtils {
     // create a dummy tableScan operator on top of op
     // TableScanOperator is implicitly created here for each MapOperator
     RowResolver rowResolver = opProcCtx.getParseCtx().getOpParseCtx().get(parent).getRowResolver();
-    Operator<? extends Serializable> ts_op = putOpInsertMap(OperatorFactory
+    TableScanOperator ts_op = (TableScanOperator) putOpInsertMap(OperatorFactory
         .get(TableScanDesc.class, parent.getSchema()), rowResolver, parseCtx);
 
     childOpList = new ArrayList<Operator<? extends Serializable>>();
     childOpList.add(op);
+    ts_op.setSplitParent(parent);
     ts_op.setChildOperators(childOpList);
     op.getParentOperators().set(posn, ts_op);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
index 8f2fef4..6d26238 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
@@ -181,6 +181,8 @@ public final class LocalMapJoinProcFactory {
       for (Operator<? extends Serializable> op : dummyOperators) {
         context.addDummyParentOp(op);
       }
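+      // cross-link the HashTableSink with its MapJoin so that
+      // ColumnTracker.peerFor() can traverse both halves of the local join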
+      hashTableSinkOp.setPeer(mapJoinOp);
+      mapJoinOp.setPeer(hashTableSinkOp);
       return null;
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
index 85c9f8f..31078d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.plan;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * Explain.
  *
@@ -30,4 +32,6 @@ public @interface Explain {
   String displayName() default "";
 
   boolean normalExplain() default true;
+
+  int columnTrack() default ColumnTracker.NONE;
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
index 769c70d..04fe6c3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * FilterDesc.
  *
@@ -88,7 +90,7 @@ public class FilterDesc implements Serializable {
     this.sampleDescr = sampleDescr;
   }
 
-  @Explain(displayName = "predicate")
+  @Explain(displayName = "predicate", columnTrack = ColumnTracker.NORMAL)
   public org.apache.hadoop.hive.ql.plan.ExprNodeDesc getPredicate() {
     return predicate;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
index 9ebbb33..1b64b21 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * GroupByDesc.
  *
@@ -114,7 +116,7 @@ public class GroupByDesc implements java.io.Serializable {
     this.mode = mode;
   }
 
-  @Explain(displayName = "keys")
+  @Explain(displayName = "keys", columnTrack = ColumnTracker.NORMAL)
   public java.util.ArrayList<ExprNodeDesc> getKeys() {
     return keys;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
index 4a729e8..bdbf9a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * Map Join operator Descriptor implementation.
 *
@@ -284,7 +286,7 @@ public class HashTableSinkDesc extends JoinDesc implements Serializable {
   /**
    * @return the keys
    */
-  @Explain(displayName = "keys")
+  @Explain(displayName = "keys", columnTrack = ColumnTracker.NORMAL)
   public Map<Byte, List<ExprNodeDesc>> getKeys() {
     return keys;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
index df9097d..7cc251e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
@@ -24,6 +24,9 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+import org.apache.hadoop.hive.ql.exec.Operator;
+
 /**
  * Join operator Descriptor implementation.
  *
@@ -127,7 +130,7 @@ public class JoinDesc implements Serializable {
     this.reversedExprs = reversedExprs;
   }
 
-  @Explain(displayName = "condition expressions")
+  @Explain(displayName = "condition expressions", columnTrack = ColumnTracker.EXPLICIT)
   public Map<Byte, String> getExprsStringMap() {
     if (getExprs() == null) {
       return null;
@@ -156,6 +159,32 @@ public class JoinDesc implements Serializable {
     return ret;
   }
 
+  public Map<Byte, String> getExprsStringMap(Operator<? extends Serializable> current) {
+    if (getExprs() == null) {
+      return null;
+    }
+    Map<Byte, String> ret = new LinkedHashMap<Byte, String>();
+    for (Map.Entry<Byte, List<ExprNodeDesc>> ent : getExprs().entrySet()) {
+      StringBuilder sb = new StringBuilder();
+      boolean first = true;
+      if (ent.getValue() != null) {
+        for (ExprNodeDesc expr : ent.getValue()) {
+          if (!first) {
+            sb.append(" ");
+          }
+
+          first = false;
+          sb.append("{");
+          String[] resolved = ColumnTracker.resolve(current, expr);
+          sb.append(expr.getExprString()).append('=').append(ColumnTracker.toString(resolved));
+          sb.append("}");
+        }
+      }
+      ret.put(ent.getKey(), sb.toString());
+    }
+    return ret;
+  }
+
   public void setExprs(final Map<Byte, List<ExprNodeDesc>> exprs) {
     this.exprs = exprs;
   }
@@ -167,7 +196,7 @@ public class JoinDesc implements Serializable {
    *
    * @return Map from alias to filters on the alias.
    */
-  @Explain(displayName = "filter predicates")
+  @Explain(displayName = "filter predicates", columnTrack = ColumnTracker.EXPLICIT)
   public Map<Byte, String> getFiltersStringMap() {
     if (getFilters() == null || getFilters().size() == 0) {
       return null;
@@ -204,6 +233,42 @@ public class JoinDesc implements Serializable {
     }
   }
 
+  public Map<Byte, String> getFiltersStringMap(Operator<? extends Serializable> current) {
+    if (getFilters() == null || getFilters().size() == 0) {
+      return null;
+    }
+
+    LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();
+    boolean filtersPresent = false;
+
+    for (Map.Entry<Byte, List<ExprNodeDesc>> ent : getFilters().entrySet()) {
+      StringBuilder sb = new StringBuilder();
+      boolean first = true;
+      if (ent.getValue() != null) {
+        if (ent.getValue().size() != 0) {
+          filtersPresent = true;
+        }
+        for (ExprNodeDesc expr : ent.getValue()) {
+          if (!first) {
+            sb.append(" ");
+          }
+
+          first = false;
+          sb.append("{");
+          String[] resolved = ColumnTracker.resolve(current, expr);
+          sb.append(expr.getExprString()).append('=').append(ColumnTracker.toString(resolved));
+          sb.append("}");
+        }
+      }
+      ret.put(ent.getKey(), sb.toString());
+    }
+
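+    // as in getFiltersStringMap(), suppress the output entirely when no
+    // alias actually carries a join filter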
+    if (filtersPresent) {
+      return ret;
+    } else {
+      return null;
+    }
+  }
 
   public Map<Byte, List<ExprNodeDesc>> getFilters() {
     return filters;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
index ada9826..c586c44 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
@@ -28,6 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * Map Join operator Descriptor implementation.
 *
@@ -106,7 +108,7 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
   /**
    * @return the keys
    */
-  @Explain(displayName = "keys")
+  @Explain(displayName = "keys", columnTrack = ColumnTracker.NORMAL)
   public Map<Byte, List<ExprNodeDesc>> getKeys() {
     return keys;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
index d7f0476..707c94b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * ReduceSinkDesc.
  *
@@ -109,7 +111,7 @@ public class ReduceSinkDesc implements Serializable {
     this.outputValueColumnNames = outputValueColumnNames;
   }
 
-  @Explain(displayName = "key expressions")
+  @Explain(displayName = "key expressions", columnTrack = ColumnTracker.NORMAL)
   public java.util.ArrayList<ExprNodeDesc> getKeyCols() {
     return keyCols;
   }
@@ -126,7 +128,7 @@ public class ReduceSinkDesc implements Serializable {
     this.numDistributionKeys = numKeys;
   }
 
-  @Explain(displayName = "value expressions")
+  @Explain(displayName = "value expressions", columnTrack = ColumnTracker.NORMAL)
   public java.util.ArrayList<ExprNodeDesc> getValueCols() {
     return valueCols;
   }
@@ -135,7 +137,7 @@ public class ReduceSinkDesc implements Serializable {
     this.valueCols = valueCols;
   }
 
-  @Explain(displayName = "Map-reduce partition columns")
+  @Explain(displayName = "Map-reduce partition columns", columnTrack = ColumnTracker.NORMAL)
   public java.util.ArrayList<ExprNodeDesc> getPartitionCols() {
     return partitionCols;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
index 97ae10d..47b5008 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.ql.exec.ColumnTracker;
+
 /**
  * SelectDesc.
 *
@@ -62,7 +64,7 @@ public class SelectDesc implements Serializable {
     this.selStarNoCompute = selStarNoCompute;
   }
 
-  @Explain(displayName = "expressions")
+  @Explain(displayName = "expressions", columnTrack = ColumnTracker.NORMAL)
   public java.util.ArrayList<ExprNodeDesc> getColList() {
     return colList;
   }