diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java index 2ad5c34..ebab2cf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveJoinRel; import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.HiveParser; import org.eigenbase.rel.RelFactories.ProjectFactory; import org.eigenbase.rel.RelNode; import org.eigenbase.relopt.RelOptUtil; @@ -74,12 +73,12 @@ return vCols; } - public static boolean validateASTForCBO (ASTNode ast) { + public static boolean validateASTForCBO(ASTNode ast) { String astTree = ast.toStringTree(); - String [] tokens = {"TOK_CHARSETLITERAL"}; + String[] tokens = { "TOK_CHARSETLITERAL" }; for (String token : tokens) { if (astTree.contains(token)) { - return false; + return false; } } return true; @@ -459,9 +458,8 @@ private static JoinLeafPredicateInfo constructJoinLeafPredicateInfo(HiveJoinRel int rightOffSet = j.getLeft().getRowType().getFieldCount(); // 1. Split leaf join predicate to expressions from left, right - @SuppressWarnings("unused") - RexNode nonEquiPredicate = RelOptUtil.splitJoinCondition(j.getSystemFieldList(), j.getLeft(), - j.getRight(), pe, joinKeyExprsFromLeft, joinKeyExprsFromRight, filterNulls, null); + RelOptUtil.splitJoinCondition(j.getSystemFieldList(), j.getLeft(), j.getRight(), pe, + joinKeyExprsFromLeft, joinKeyExprsFromRight, filterNulls, null); // 2. 
For left expressions, collect child projection indexes used InputReferencedVisitor irvLeft = new InputReferencedVisitor(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCost.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCost.java index a4df413..72fe5d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCost.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCost.java @@ -123,6 +123,7 @@ public boolean equals(RelOptCost other) { public boolean isEqWithEpsilon(RelOptCost other) { return (this == other) || (Math.abs((this.rowCount) - (other.getRows())) < RelOptUtil.EPSILON); + // Turn this on once we do the Algorithm selection in CBO /* * return (this == other) || (Math.abs((this.dCpu + this.dIo) - * (other.getCpu() + other.getIo())) < RelOptUtil.EPSILON); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCostUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCostUtil.java index 257c380..7436f12 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCostUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/cost/HiveCostUtil.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveTableScanRel; import org.eigenbase.relopt.RelOptCost; +// Use this once we have Join Algorithm selection public class HiveCostUtil { private static final double cpuCostInNanoSec = 1.0; private static final double netCostInNanoSec = 150 * cpuCostInNanoSec; @@ -28,6 +29,7 @@ private static final double localFSReadCostInNanoSec = 4 * netCostInNanoSec; private static final double hDFSWriteCostInNanoSec = 10 * localFSWriteCostInNanoSec; @SuppressWarnings("unused") +//Use this once we have Join Algorithm selection private static final double hDFSReadCostInNanoSec = 1.5 * localFSReadCostInNanoSec; public static RelOptCost computCardinalityBasedCost(HiveRel hr) { diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveJoinRel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveJoinRel.java index 363e9ee..3d6aa84 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveJoinRel.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveJoinRel.java @@ -55,6 +55,7 @@ private final boolean leftSemiJoin; private final JoinAlgorithm joinAlgorithm; + //This will be used once we do Join Algorithm selection @SuppressWarnings("unused") private final MapJoinStreamingRelation mapJoinStreamingSide = MapJoinStreamingRelation.NONE; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveLimitRel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveLimitRel.java deleted file mode 100644 index f8755d0..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveLimitRel.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.ql.optimizer.optiq.reloperators; - -import java.util.List; - -import org.apache.hadoop.hive.ql.optimizer.optiq.TraitsUtil; -import org.apache.hadoop.hive.ql.optimizer.optiq.cost.HiveCost; -import org.eigenbase.rel.RelNode; -import org.eigenbase.rel.SingleRel; -import org.eigenbase.relopt.RelOptCluster; -import org.eigenbase.relopt.RelOptCost; -import org.eigenbase.relopt.RelOptPlanner; -import org.eigenbase.relopt.RelTraitSet; -import org.eigenbase.rex.RexNode; - -public class HiveLimitRel extends SingleRel implements HiveRel { - private final RexNode offset; - private final RexNode fetch; - - HiveLimitRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RexNode offset, - RexNode fetch) { - super(cluster, TraitsUtil.getDefaultTraitSet(cluster), child); - this.offset = offset; - this.fetch = fetch; - assert getConvention() instanceof HiveRel; - assert getConvention() == child.getConvention(); - } - - @Override - public HiveLimitRel copy(RelTraitSet traitSet, List newInputs) { - return new HiveLimitRel(getCluster(), traitSet, sole(newInputs), offset, fetch); - } - - public void implement(Implementor implementor) { - } - - @Override - public RelOptCost computeSelfCost(RelOptPlanner planner) { - return HiveCost.FACTORY.makeZeroCost(); - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveSortRel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveSortRel.java index dc8f614..82db7b1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveSortRel.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/reloperators/HiveSortRel.java @@ -34,8 +34,6 @@ public HiveSortRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RelCollation collation, RexNode offset, RexNode fetch) { super(cluster, TraitsUtil.getSortTraitSet(cluster, traitSet, collation), child, collation, offset, fetch); - - assert getConvention() == 
child.getConvention(); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HiveMergeProjectRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HiveMergeProjectRule.java index 416343a..d6581e6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HiveMergeProjectRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HiveMergeProjectRule.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveProjectRel; import org.eigenbase.rel.rules.MergeProjectRule; +//Currently not used, turn this on later public class HiveMergeProjectRule extends MergeProjectRule { public static final HiveMergeProjectRule INSTANCE = new HiveMergeProjectRule(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePartitionPrunerRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePartitionPrunerRule.java index 6f06c6a..ee19a6c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePartitionPrunerRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePartitionPrunerRule.java @@ -52,9 +52,6 @@ protected void perform(RelOptRuleCall call, FilterRelBase filter, Pair predicates = PartitionPruner .extractPartitionPredicates(filter.getCluster(), hiveTable, predicate); RexNode partColExpr = predicates.left; - RexNode remainingExpr = predicates.right; - remainingExpr = remainingExpr == null ? 
filter.getCluster().getRexBuilder() - .makeLiteral(true) : remainingExpr; hiveTable.computePartitionList(conf, partColExpr); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java index 83242b3..7ebe652 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java @@ -217,7 +217,7 @@ private boolean isPartitionPredicate(RexNode expr, RelNode r) { expr = RelOptUtil.pushFilterPastProject(expr, (ProjectRelBase) r); return isPartitionPredicate(expr, ((ProjectRelBase) r).getChild()); } else if ( r instanceof FilterRelBase ) { - isPartitionPredicate(expr, ((ProjectRelBase) r).getChild()); + return isPartitionPredicate(expr, ((FilterRelBase) r).getChild()); } else if ( r instanceof HiveTableScanRel ) { RelOptHiveTable table = (RelOptHiveTable) ((HiveTableScanRel)r).getTable(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java index f632d64..a655174 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java @@ -103,12 +103,6 @@ private static void convertOpTree(RelNode rel, RelNode parent) { introduceDerivedTable(rel, parent); } } - } else if (rel instanceof TableAccessRelBase) { - - } else if (rel instanceof TableFunctionRelBase) { - - } else if (rel instanceof ValuesRelBase) { - } List childNodes = rel.getInputs(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java 
index 006870c..56e210d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java @@ -134,13 +134,12 @@ private RexNode convert(final ExprNodeFieldDesc fieldDesc) throws SemanticExcept RexNode rexNode = convert(fieldDesc.getDesc()); if (rexNode instanceof RexCall) { // regular case of accessing nested field in a column - return m_cluster.getRexBuilder().makeFieldAccess(rexNode, - fieldDesc.getFieldName(), true); + return m_cluster.getRexBuilder().makeFieldAccess(rexNode, fieldDesc.getFieldName(), true); } else { // This may happen for schema-less tables, where columns are dynamically // supplied by serdes. - throw new OptiqSemanticException("Unexpected rexnode : " + - rexNode.getClass().getCanonicalName()); + throw new OptiqSemanticException("Unexpected rexnode : " + + rexNode.getClass().getCanonicalName()); } } @@ -152,7 +151,8 @@ private RexNode convert(final ExprNodeGenericFuncDesc func) throws SemanticExcep List childRexNodeLst = new LinkedList(); Builder argTypeBldr = ImmutableList. builder(); - // TODO: 1) Expand to other functions as needed 2) What about types other than primitive. + // TODO: 1) Expand to other functions as needed 2) What about types other + // than primitive. if (func.getGenericUDF() instanceof GenericUDFBaseNumeric) { tgtDT = func.getTypeInfo(); } else if (func.getGenericUDF() instanceof GenericUDFBaseCompare) { @@ -179,10 +179,9 @@ private RexNode convert(final ExprNodeGenericFuncDesc func) throws SemanticExcep expr = handleExplicitCast(func, childRexNodeLst); if (expr == null) { - retType = (expr != null) ? 
expr.getType() : TypeConverter.convert(func.getTypeInfo(), - m_cluster.getTypeFactory()); - SqlOperator optiqOp = SqlFunctionConverter.getOptiqOperator( - func.getFuncText(), func.getGenericUDF(), argTypeBldr.build(), retType); + retType = TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory()); + SqlOperator optiqOp = SqlFunctionConverter.getOptiqOperator(func.getFuncText(), + func.getGenericUDF(), argTypeBldr.build(), retType); expr = m_cluster.getRexBuilder().makeCall(optiqOp, childRexNodeLst); } else { retType = expr.getType(); @@ -221,8 +220,8 @@ private boolean castExprUsingUDFBridge(GenericUDF gUDF) { return castExpr; } - private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, - List childRexNodeLst) throws OptiqSemanticException { + private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List childRexNodeLst) + throws OptiqSemanticException { RexNode castExpr = null; if (childRexNodeLst != null && childRexNodeLst.size() == 1) { @@ -230,8 +229,6 @@ private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar) || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate) || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) { - // || (udf instanceof GenericUDFToUnixTimeStamp) || (udf instanceof - // GenericUDFTimestamp) || castExprUsingUDFBridge(udf)) { castExpr = m_cluster.getRexBuilder().makeAbstractCast( TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory()), childRexNodeLst.get(0)); @@ -285,8 +282,8 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticExce PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory(); ConstantObjectInspector coi = literal.getWritableObjectInspector(); - Object value = ObjectInspectorUtils.copyToStandardJavaObject( - coi.getWritableConstantValue(), coi); + Object value = 
ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), + coi); RexNode optiqLiteral = null; // TODO: Verify if we need to use ConstantObjectInspector to unwrap data @@ -295,7 +292,7 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticExce optiqLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue()); break; case BYTE: - byte[] byteArray = new byte[] { (Byte) value}; + byte[] byteArray = new byte[] { (Byte) value }; ByteString bs = new ByteString(byteArray); optiqLiteral = rexBuilder.makeBinaryLiteral(bs); break; @@ -316,24 +313,31 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticExce value = ((Decimal128) value).toBigDecimal(); } if (value == null) { - // We have found an invalid decimal value while enforcing precision and scale. Ideally, - // we would replace it with null here, which is what Hive does. However, we need to plumb - // this thru up somehow, because otherwise having different expression type in AST causes - // the plan generation to fail after CBO, probably due to some residual state in SA/QB. - // For now, we will not run CBO in the presence of invalid decimal literals. - throw new OptiqSemanticException("Expression " - + literal.getExprString() + " is not a valid decimal"); + // We have found an invalid decimal value while enforcing precision and + // scale. Ideally, + // we would replace it with null here, which is what Hive does. However, + // we need to plumb + // this thru up somehow, because otherwise having different expression + // type in AST causes + // the plan generation to fail after CBO, probably due to some residual + // state in SA/QB. + // For now, we will not run CBO in the presence of invalid decimal + // literals. 
+ throw new OptiqSemanticException("Expression " + literal.getExprString() + + " is not a valid decimal"); // TODO: return createNullLiteral(literal); } - BigDecimal bd = (BigDecimal)value; + BigDecimal bd = (BigDecimal) value; BigInteger unscaled = bd.unscaledValue(); if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) { optiqLiteral = rexBuilder.makeExactLiteral(bd); } else { - // CBO doesn't support unlimited precision decimals. In practice, this will work... - // An alternative would be to throw CboSemanticException and fall back to no CBO. - RelDataType relType = m_cluster.getTypeFactory().createSqlType( - SqlTypeName.DECIMAL, bd.scale(), unscaled.toString().length()); + // CBO doesn't support unlimited precision decimals. In practice, this + // will work... + // An alternative would be to throw CboSemanticException and fall back + // to no CBO. + RelDataType relType = m_cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, + bd.scale(), unscaled.toString().length()); optiqLiteral = rexBuilder.makeExactLiteral(bd, relType); } break; @@ -376,8 +380,8 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticExce } private RexNode createNullLiteral(ExprNodeDesc expr) throws OptiqSemanticException { - return m_cluster.getRexBuilder().makeNullLiteral(TypeConverter.convert( - expr.getTypeInfo(), m_cluster.getTypeFactory()).getSqlTypeName()); + return m_cluster.getRexBuilder().makeNullLiteral( + TypeConverter.convert(expr.getTypeInfo(), m_cluster.getTypeFactory()).getSqlTypeName()); } public static RexNode convert(RelOptCluster cluster, ExprNodeDesc joinCondnExprNode, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 943cc5c..eb73bef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -249,7 +249,6 @@ import 
org.eigenbase.rel.rules.MergeFilterRule; import org.eigenbase.rel.rules.PushFilterPastProjectRule; import org.eigenbase.rel.rules.PushFilterPastSetOpRule; -import org.eigenbase.rel.rules.RemoveTrivialProjectRule; import org.eigenbase.rel.rules.SemiJoinRel; import org.eigenbase.rel.rules.TransitivePredicatesOnJoinRule; import org.eigenbase.relopt.RelOptCluster; @@ -284,7 +283,6 @@ import org.eigenbase.util.CompositeList; import org.eigenbase.util.ImmutableIntList; import org.eigenbase.util.Pair; -import org.eigenbase.util.Util; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; @@ -12642,7 +12640,7 @@ private RelNode genTableLogicalPlan(String tableAlias, QB qb) throws SemanticExc try { - // 0. If the table has a Sample specified, bail from Optiq path. + // 1. If the table has a Sample specified, bail from Optiq path. if ( qb.getParseInfo().getTabSample(tableAlias) != null || SemanticAnalyzer.this.nameToSplitSample.containsKey(tableAlias)) { String msg = String.format("Table Sample specified for %s." + @@ -12652,9 +12650,6 @@ private RelNode genTableLogicalPlan(String tableAlias, QB qb) throws SemanticExc throw new OptiqSemanticException(msg); } - // 1. Get Table Alias - String alias_id = getAliasId(tableAlias, qb); - // 2. 
Get Table Metadata Table tab = qb.getMetaData().getSrcForAlias(tableAlias); @@ -12984,7 +12979,6 @@ private AggregateCall convertGBAgg(AggInfo agg, RelNode input, List gbC private RelNode genGBRelNode(List gbExprs, List aggInfoLst, RelNode srcRel) throws SemanticException { RowResolver gbInputRR = this.relToHiveRR.get(srcRel); - ArrayList signature = gbInputRR.getRowSchema().getSignature(); ImmutableMap posMap = this.relToHiveColNameOptiqPosMap.get(srcRel); RexNodeConverter converter = new RexNodeConverter(this.cluster, srcRel.getRowType(), posMap, 0, false); @@ -13194,7 +13188,6 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException if (hasGrpByAstExprs || hasAggregationTrees) { ArrayList gbExprNDescLst = new ArrayList(); ArrayList outputColumnNames = new ArrayList(); - int numDistinctUDFs = 0; // 2. Input, Output Row Resolvers RowResolver groupByInputRowResolver = this.relToHiveRR.get(srcRel); @@ -13226,9 +13219,6 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException String aggName = unescapeIdentifier(value.getChild(0).getText()); boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI; boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR; - if (isDistinct) { - numDistinctUDFs++; - } // 4.2 Convert UDAF Params to ExprNodeDesc ArrayList aggParameters = new ArrayList(); @@ -13593,7 +13583,6 @@ private RelNode genSelectForWindowing(QB qb, RelNode srcRel) throws SemanticExce // 6.2.2 Update Output Row Schema ColumnInfo oColInfo = new ColumnInfo( getColumnInternalName(projsForWindowSelOp.size()), wtp.getValue(), null, false); - String colAlias = wExprSpec.getAlias(); if (false) { out_rwsch.checkColumn(null, wExprSpec.getAlias()); out_rwsch.put(null, wExprSpec.getAlias(), oColInfo);