diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
index ba04410..f276edb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
@@ -569,12 +569,12 @@ public Void visitCall(org.apache.calcite.rex.RexCall call) {
return deterministic;
}
- public static ImmutableMap<Integer, ColumnInfo> getColInfoMap(List<ColumnInfo> hiveCols,
+ public static <T> ImmutableMap<Integer, T> getColInfoMap(List<T> hiveCols,
int startIndx) {
- Builder<Integer, ColumnInfo> bldr = ImmutableMap.<Integer, ColumnInfo> builder();
+ Builder<Integer, T> bldr = ImmutableMap.<Integer, T> builder();
int indx = startIndx;
- for (ColumnInfo ci : hiveCols) {
+ for (T ci : hiveCols) {
bldr.put(indx, ci);
indx++;
}
@@ -618,6 +618,18 @@ public Void visitCall(org.apache.calcite.rex.RexCall call) {
return bldr.build();
}
+ public static ImmutableMap<String, Integer> getRowColNameIndxMap(List<RelDataTypeField> rowFields) {
+ Builder<String, Integer> bldr = ImmutableMap.<String, Integer> builder();
+
+ int indx = 0;
+ for (RelDataTypeField rdt : rowFields) {
+ bldr.put(rdt.getName(), indx);
+ indx++;
+ }
+
+ return bldr.build();
+ }
+
public static ImmutableList<RexNode> getInputRef(List<Integer> inputRefs, RelNode inputRel) {
ImmutableList.Builder<RexNode> bldr = ImmutableList.<RexNode> builder();
for (int i : inputRefs) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index e868e37..542ec96 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -454,4 +454,16 @@ public boolean containsPartitionColumnsOnly(ImmutableBitSet cols) {
public String getQBID() {
return qbID;
}
+
+ public int getNoOfNonVirtualCols() {
+ return noOfNonVirtualCols;
+ }
+
+ public Map<Integer, ColumnInfo> getPartColInfoMap() {
+ return hivePartitionColsMap;
+ }
+
+ public Map<Integer, ColumnInfo> getNonPartColInfoMap() {
+ return hiveNonPartitionColsMap;
+ }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableScan.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableScan.java
index c53d6ae..f2c5408 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableScan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableScan.java
@@ -19,6 +19,7 @@
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import org.apache.calcite.plan.RelOptCluster;
@@ -37,6 +38,9 @@
import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveCost;
import org.apache.hadoop.hive.ql.plan.ColStatistics;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableList.Builder;
+
/**
* Relational expression representing a scan of a HiveDB collection.
@@ -47,7 +51,9 @@
*
*/
public class HiveTableScan extends TableScan implements HiveRelNode {
-
+ private final RelDataType rowtype;
+ private final ImmutableList<Integer> neededColIndxsFrmReloptHT;
+
/**
* Creates a HiveTableScan.
*
@@ -61,8 +67,15 @@
* HiveDB table
*/
public HiveTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptHiveTable table) {
+ this(cluster, traitSet, table, table.getRowType());
+ }
+
+ private HiveTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptHiveTable table,
+ RelDataType newRowtype) {
super(cluster, TraitsUtil.getDefaultTraitSet(cluster), table);
assert getConvention() == HiveRelNode.CONVENTION;
+ this.rowtype = newRowtype;
+ this.neededColIndxsFrmReloptHT = buildNeededColIndxsFrmReloptHT(table.getRowType(), newRowtype);
}
@Override
@@ -79,7 +92,12 @@ public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
* @return
*/
public HiveTableScan copy(RelDataType newRowtype) {
- return new HiveTableScan(getCluster(), getTraitSet(), ((RelOptHiveTable) table).copy(newRowtype));
+ return new HiveTableScan(getCluster(), getTraitSet(), ((RelOptHiveTable) table), newRowtype);
+ }
+
+ @Override
+ public RelDataType deriveRowType() {
+ return rowtype;
}
@Override
@@ -137,4 +155,22 @@ public RelNode project(ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields,
return newHT;
}
+
+ public List<Integer> getNeededColIndxsFrmReloptHT() {
+ return neededColIndxsFrmReloptHT;
+ }
+
+ private static ImmutableList<Integer> buildNeededColIndxsFrmReloptHT(RelDataType htRowtype,
+ RelDataType scanRowType) {
+ Builder<Integer> neededColIndxsFrmReloptHTBldr = new ImmutableList.Builder<Integer>();
+ Map<String, Integer> colNameToPosInReloptHT = HiveCalciteUtil.getRowColNameIndxMap(htRowtype
+ .getFieldList());
+ List<String> colNamesInScanRowType = scanRowType.getFieldNames();
+
+ for (int i = 0; i < colNamesInScanRowType.size(); i++) {
+ neededColIndxsFrmReloptHTBldr.add(colNameToPosInReloptHT.get(colNamesInScanRowType.get(i)));
+ }
+
+ return neededColIndxsFrmReloptHTBldr.build();
+ }
}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java
index b721190..576b18c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java
@@ -35,7 +35,6 @@
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.SemiJoin;
import org.apache.calcite.rel.logical.LogicalExchange;
-import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
@@ -192,48 +191,60 @@ OpAttr visit(HiveTableScan scanRel) {
}
RelOptHiveTable ht = (RelOptHiveTable) scanRel.getTable();
- Map<Integer, VirtualColumn> newVColMap = new HashMap<Integer, VirtualColumn>();
// 1. Setup TableScan Desc
- // 1.1 Create TableScanDesc
- String tableAlias = ht.getTableAlias();
+ // 1.1 Build col details used by scan
+ ArrayList<ColumnInfo> colInfos = new ArrayList<ColumnInfo>();
List<VirtualColumn> virtualCols = new ArrayList<VirtualColumn>(ht.getVirtualCols());
- TableScanDesc tsd = new TableScanDesc(tableAlias, virtualCols, ht.getHiveTableMD());
-
- // 1.2. Set Partition cols in TSDesc
- List<ColumnInfo> partColInfos = ht.getPartColumns();
+ Map<Integer, VirtualColumn> hiveScanVColMap = new HashMap<Integer, VirtualColumn>();
List<String> partColNames = new ArrayList<String>();
- for (ColumnInfo ci : partColInfos) {
- partColNames.add(ci.getInternalName());
- }
- tsd.setPartColumns(partColNames);
-
- // 1.3. Set needed cols in TSDesc
List<Integer> neededColumnIDs = new ArrayList<Integer>();
List<String> neededColumns = new ArrayList<String>();
- Map<String, Integer> colNameToIndxMap = HiveCalciteUtil.getColNameIndxMap(ht.getHiveTableMD()
- .getCols());
- for (RelDataTypeField rdtf : scanRel.getRowType().getFieldList()) {
- neededColumnIDs.add(colNameToIndxMap.get(rdtf.getName()));
- neededColumns.add(rdtf.getName());
+
+ Map<Integer, VirtualColumn> posToVColMap = HiveCalciteUtil.getVColsMap(virtualCols,
+ ht.getNoOfNonVirtualCols());
+ Map<Integer, ColumnInfo> posToPartColInfo = ht.getPartColInfoMap();
+ Map<Integer, ColumnInfo> posToNonPartColInfo = ht.getNonPartColInfoMap();
+ List<Integer> neededColIndxsFrmReloptHT = scanRel.getNeededColIndxsFrmReloptHT();
+ List<String> scanColNames = scanRel.getRowType().getFieldNames();
+ String tableAlias = ht.getTableAlias();
+
+ String colName;
+ ColumnInfo colInfo;
+ VirtualColumn vc;
+ Integer posInRHT;
+
+ for (int i = 0; i < neededColIndxsFrmReloptHT.size(); i++) {
+ colName = scanColNames.get(i);
+ posInRHT = neededColIndxsFrmReloptHT.get(i);
+ if (posToVColMap.containsKey(posInRHT)) {
+ vc = posToVColMap.get(posInRHT);
+ virtualCols.add(vc);
+ colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias, true, vc.getIsHidden());
+ hiveScanVColMap.put(i, vc);
+ } else if (posToPartColInfo.containsKey(posInRHT)) {
+ partColNames.add(colName);
+ colInfo = posToPartColInfo.get(posInRHT);
+ } else {
+ colInfo = posToNonPartColInfo.get(posInRHT);
+ }
+ neededColumnIDs.add(posInRHT);
+ neededColumns.add(colName);
+ colInfos.add(colInfo);
}
+
+ // 1.2 Create TableScanDesc
+ TableScanDesc tsd = new TableScanDesc(tableAlias, virtualCols, ht.getHiveTableMD());
+
+ // 1.3. Set Partition cols in TSDesc
+ tsd.setPartColumns(partColNames);
+
+ // 1.4. Set needed cols in TSDesc
tsd.setNeededColumnIDs(neededColumnIDs);
tsd.setNeededColumns(neededColumns);
// 2. Setup TableScan
- TableScanOperator ts = null;
- // 2.1 Construct ordered colInfo list for TS RowSchema & update vcolMap
- ArrayList<ColumnInfo> colInfos = new ArrayList<ColumnInfo>(ht.getNonPartColumns());
- colInfos.addAll(ht.getPartColumns());
- ColumnInfo ci;
- for (VirtualColumn vc : virtualCols) {
- ci = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias, true, vc.getIsHidden());
- colInfos.add(ci);
- newVColMap.put(colInfos.size(), vc);
- }
-
- // 2.2. Create TS OP
- ts = (TableScanOperator) OperatorFactory.get(tsd, new RowSchema(colInfos));
+ TableScanOperator ts = (TableScanOperator) OperatorFactory.get(tsd, new RowSchema(colInfos));
topOps.put(ht.getQBID(), ts);
@@ -241,7 +252,7 @@ OpAttr visit(HiveTableScan scanRel) {
LOG.debug("Generated " + ts + " with row schema: [" + ts.getSchema() + "]");
}
- return new OpAttr(tableAlias, newVColMap, ts);
+ return new OpAttr(tableAlias, hiveScanVColMap, ts);
}
OpAttr visit(HiveProject projectRel) throws SemanticException {
@@ -877,5 +888,4 @@ private static ExprNodeDesc convertToExprNode(RexNode rn, RelNode inputRel, Stri
return new Pair<ArrayList<ColumnInfo>, Map<Integer, VirtualColumn>>(colInfos, newVColMap);
}
-
}