diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
index 24c8281..8937aab 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
@@ -38,6 +38,8 @@
   private transient StructField[] fields;
   private transient boolean[] unionField;
 
+  private transient String txt;
+
   public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr) {
     super(expr);
   }
@@ -45,6 +47,7 @@ public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr) {
   @Override
   public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
+    txt = expr.getColumn();
 
     // We need to support field names like KEY.0, VALUE.1 between
     // map-reduce boundary.
     String[] names = expr.getColumn().split("\\.");
@@ -103,4 +106,8 @@ protected Object _evaluate(Object row, int version) throws HiveException {
     return o;
   }
 
+  public String toString() {
+    return "ExprNodeColumnEvaluator(" + txt + ")";
+  }
+
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
index 03a64e8..9b38d50 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.Serializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -261,6 +262,9 @@ protected static StructObjectInspector initEvaluatorsAndReturnStruct(
     return ObjectInspectorFactory.getStandardStructObjectInspector(outputColNames, sois );
   }
 
+
+  private boolean xyz = true;
+
   @Override
   @SuppressWarnings("unchecked")
   public void processOp(Object row, int tag) throws HiveException {
@@ -298,6 +302,24 @@ public void processOp(Object row, int tag) throws HiveException {
 
       HiveKey firstKey = toHiveKey(cachedKeys[0], tag, null);
       int distKeyLength = firstKey.getDistKeyLength();
+      if (distKeyLength <= 1) {
+        if (xyz) {
+          StringBuffer x1 = new StringBuffer();
+          x1.append("numDistributionKeys = " + numDistributionKeys + "\n");
+          for (int i = 0; i < numDistributionKeys; i++) {
+            x1.append(cachedKeys[0][i] + " --> " + keyEval[i] + "\n");
+          }
+          x1.append("key_row=" + SerDeUtils.getJSONString(row, keyObjectInspector) + "\n");
+          x1.append("full_row=" + SerDeUtils.getJSONString(row, rowInspector) + "\n");
+          x1.append("col_names=");
+          for (String col : conf.getOutputKeyColumnNames()) {
+            x1.append(col + ",");
+          }
+          x1.append("\n");
+          LOG.info("GOPAL: " + x1.toString());
+          xyz = false;
+        }
+      }
       if (numDistinctExprs > 0) {
         populateCachedDistinctKeys(row, 0);
         firstKey = toHiveKey(cachedKeys[0], tag, distKeyLength);