diff --git ql/pom.xml ql/pom.xml
index 90dfb2c..fa79601 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -351,6 +351,7 @@
<include>org.apache.hive:hive-common</include>
<include>org.apache.hive:hive-exec</include>
<include>org.apache.hive:hive-serde</include>
+ <include>org.apache.hive:hive-metastore</include>
<include>com.esotericsoftware.kryo:kryo</include>
<include>org.apache.thrift:libthrift</include>
<include>commons-lang:commons-lang</include>
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index 99b062f..65c4a8f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -71,7 +71,6 @@ public ColumnStatsTask() {
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
super.initialize(conf, queryPlan, ctx);
- work.initializeForFetch();
try {
JobConf job = new JobConf(conf);
ftOp = new FetchOperator(work.getfWork(), job);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index ffc4c42..f88d810 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@ -34,10 +34,8 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
-import org.apache.hadoop.hive.ql.exec.FooterBuffer;
import org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveRecordReader;
@@ -48,7 +46,6 @@
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.DelegatedObjectInspectorFactory;
@@ -116,21 +113,25 @@ public FetchOperator() {
}
public FetchOperator(FetchWork work, JobConf job) {
- this.job = job;
- this.work = work;
- initialize();
+ this(work, job, null, null);
}
public FetchOperator(FetchWork work, JobConf job, Operator<?> operator,
- List<VirtualColumn> vcCols) {
+ ExecMapperContext context) {
this.job = job;
this.work = work;
- this.operator = operator;
- this.vcCols = vcCols;
- initialize();
+ initialize(operator, context);
}
- private void initialize() {
+ private List<VirtualColumn> getVirtualColumns(Operator<?> ts) {
+ if (ts instanceof TableScanOperator && ts.getConf() != null) {
+ return ((TableScanOperator)ts).getConf().getVirtualCols();
+ }
+ return null;
+ }
+
+ public void initialize(Operator<?> operator, ExecMapperContext context) {
+ this.vcCols = getVirtualColumns(operator);
if (hasVC = vcCols != null && !vcCols.isEmpty()) {
List<String> names = new ArrayList<String>(vcCols.size());
List<ObjectInspector> inspectors = new ArrayList<ObjectInspector>(vcCols.size());
@@ -156,15 +157,14 @@ private void initialize() {
} else {
isNativeTable = true;
}
- setupExecContext();
- }
-
- private void setupExecContext() {
- if (hasVC || work.getSplitSample() != null) {
+ if (context == null && (hasVC || work.getSplitSample() != null)) {
context = new ExecMapperContext();
- if (operator != null) {
- operator.setExecContext(context);
- }
+ }
+ this.operator = operator;
+ this.context = context;
+
+ if (operator != null && context != null) {
+ operator.setExecContext(context);
}
}
@@ -501,27 +501,28 @@ public boolean doNext(WritableComparable key, Writable value) throws IOException
/**
* Get the next row and push down it to operator tree.
- * Currently only used by FetchTask.
+ * Currently only used by FetchTask and ExecDriver (for partition sampling).
**/
public boolean pushRow() throws IOException, HiveException {
- if(work.getRowsComputedUsingStats() != null) {
- for (List