diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
index 9e59d19..c9e469c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
@@ -321,8 +321,4 @@ public void close() {
       Utilities.clearWorkMap();
     }
   }
-
-  public Operator<?> getReducer() {
-    return reducer;
-  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
index 7dcd81a..c33bd1e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
@@ -18,15 +18,10 @@
 package org.apache.hadoop.hive.ql.exec.spark;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.OperatorUtils;
-import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.mapred.Reporter;
 import scala.Tuple2;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Iterator;
 
 public class HiveReduceFunctionResultList extends
@@ -44,7 +39,6 @@ public HiveReduceFunctionResultList(Configuration conf,
       SparkReduceRecordHandler reducer) {
     super(conf, inputIterator);
     this.reduceRecordHandler = reducer;
-    setOutputCollector();
   }
 
   @Override
@@ -62,11 +56,4 @@ protected boolean processingDone() {
   protected void closeRecordProcessor() {
     reduceRecordHandler.close();
   }
-
-  private void setOutputCollector() {
-    if (reduceRecordHandler != null && reduceRecordHandler.getReducer() != null) {
-      OperatorUtils.setChildrenCollector(
-          Arrays.<Operator<? extends OperatorDesc>>asList(reduceRecordHandler.getReducer()), this);
-    }
-  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
index c0b6b78..94ebcdd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
@@ -29,13 +29,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.hive.ql.exec.MapredContext;
-import org.apache.hadoop.hive.ql.exec.ObjectCache;
-import org.apache.hadoop.hive.ql.exec.ObjectCacheFactory;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.ReportStats;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -135,6 +132,9 @@ public void init(JobConf job, OutputCollector output, Reporter reporter) {
     }
 
     reducer.setReporter(rp);
+    // attach the output collector to the operators in the reduce tree
+    OperatorUtils.setChildrenCollector(
+        Arrays.<Operator<? extends OperatorDesc>>asList(reducer), output);
 
     // initialize reduce operator tree
     try {
@@ -279,8 +278,4 @@ public void close() {
       Utilities.clearWorkMap();
     }
   }
-
-  public Operator<?> getReducer() {
-    return reducer;
-  }
 }