diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableFunctionScanVisitor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableFunctionScanVisitor.java
index 55455f0291..7c2d424553 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableFunctionScanVisitor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableFunctionScanVisitor.java
@@ -26,6 +26,7 @@
 import java.util.stream.Collectors;

 import org.apache.calcite.rex.RexCall;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -66,7 +67,7 @@ OpAttr visit(HiveTableFunctionScan scanRel) throws SemanticException {

     RowResolver rowResolver = new RowResolver();
     List<String> fieldNames = new ArrayList<>(scanRel.getRowType().getFieldNames());
-    List<String> exprNames = new ArrayList<>(fieldNames);
+    List<String> functionFieldNames = new ArrayList<>();
     List<ExprNodeDesc> exprCols = new ArrayList<>();
     Map<String, ExprNodeDesc> colExprMap = new HashMap<>();
     for (int pos = 0; pos < call.getOperands().size(); pos++) {
@@ -74,22 +75,25 @@ OpAttr visit(HiveTableFunctionScan scanRel) throws SemanticException {
           scanRel.getRowType(), scanRel.getRowType(), ((HiveTableScan)scanRel.getInput(0)).getPartOrVirtualCols(),
           scanRel.getCluster().getTypeFactory(), true);
       ExprNodeDesc exprCol = call.getOperands().get(pos).accept(converter);
-      colExprMap.put(exprNames.get(pos), exprCol);
+      colExprMap.put(HiveConf.getColumnInternalName(pos), exprCol);
       exprCols.add(exprCol);

-      ColumnInfo columnInfo = new ColumnInfo(fieldNames.get(pos), exprCol.getWritableObjectInspector(), null, false);
+      ColumnInfo columnInfo = new ColumnInfo(HiveConf.getColumnInternalName(pos),
+          exprCol.getWritableObjectInspector(), SemanticAnalyzer.DUMMY_TABLE, false);
       rowResolver.put(columnInfo.getTabAlias(), columnInfo.getAlias(), columnInfo);
+
+      functionFieldNames.add(HiveConf.getColumnInternalName(pos));
     }

     OpAttr inputOpAf = hiveOpConverter.dispatch(scanRel.getInputs().get(0));
     TableScanOperator op = (TableScanOperator)inputOpAf.inputs.get(0);
     op.getConf().setRowLimit(1);

-    Operator<?> output = OperatorFactory.getAndMakeChild(new SelectDesc(exprCols, fieldNames, false),
+    Operator<?> output = OperatorFactory.getAndMakeChild(new SelectDesc(exprCols, functionFieldNames, false),
         new RowSchema(rowResolver.getRowSchema()), op);
     output.setColumnExprMap(colExprMap);

-    Operator<?> funcOp = genUDTFPlan(call, fieldNames, output, rowResolver);
+    Operator<?> funcOp = genUDTFPlan(call, functionFieldNames, output, rowResolver);

     return new OpAttr(null, new HashSet<Integer>(), funcOp);
   }
@@ -133,6 +137,7 @@ private StructObjectInspector createStructObjectInspector(RowResolver rowResolve
       // field name from the UDTF's OI as the internal name
       ColumnInfo col = new ColumnInfo(sf.getFieldName(),
           TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), null, false);
+      col.setAlias(sf.getFieldName());
       columnInfos.add(col);
     }
     return columnInfos;
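The HiveTableFunctionScanVisitor hunks above key the SELECT's output schema and its column-expression map by Hive's positional internal column names instead of the UDTF's reported field names, which keeps the map keys unique and stable even when field names collide. HiveConf.getColumnInternalName(pos) is the stock generator for those names. A minimal sketch of what it yields, assuming only hive-common on the classpath (the class name InternalNameDemo is invented for illustration):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class InternalNameDemo {
      public static void main(String[] args) {
        // Internal names are purely positional, so they stay the same
        // regardless of the aliases the UDTF reports for its output columns.
        for (int pos = 0; pos < 3; pos++) {
          System.out.println(HiveConf.getColumnInternalName(pos)); // _col0, _col1, _col2
        }
      }
    }
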
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableScanVisitor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableScanVisitor.java
index 14958aa674..72411ecc79 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableScanVisitor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/opconventer/HiveTableScanVisitor.java
@@ -108,6 +108,7 @@ OpAttr visit(HiveTableScan scanRel) {
     // 2. Setup TableScan
     TableScanOperator ts = (TableScanOperator) OperatorFactory.get(
         hiveOpConverter.getSemanticAnalyzer().getOpContext(), tsd, new RowSchema(colInfos));
+    ts.setBucketingVersion(tsd.getTableMetadata().getBucketingVersion());

     //now that we let Calcite process subqueries we might have more than one
     // tablescan with same alias.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index ea5fa3f4c3..0ccd19e5cd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -111,7 +111,6 @@
 import org.apache.calcite.tools.Frameworks;
 import org.apache.calcite.util.CompositeList;
 import org.apache.calcite.util.ImmutableBitSet;
-import org.apache.calcite.util.ImmutableIntList;
 import org.apache.calcite.util.Pair;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.Constants;
@@ -1571,8 +1570,7 @@ Operator getOptimizedHiveOPDag(RelNode optimizedOptiqPlan) throws SemanticExcept
     RowResolver hiveRootRR = genRowResolver(hiveRoot, getQB());
     opParseCtx.put(hiveRoot, new OpParseContext(hiveRootRR));
     String dest = getQB().getParseInfo().getClauseNames().iterator().next();
-    if (getQB().getParseInfo().getDestSchemaForClause(dest) != null
-        && this.getQB().getTableDesc() == null) {
+    if (isInsertInto(getQB().getParseInfo(), dest)) {
       Operator<?> selOp = handleInsertStatement(dest, hiveRoot, hiveRootRR, getQB());
       return genFileSinkPlan(dest, getQB(), selOp);
     } else {
@@ -1592,7 +1590,8 @@ Operator getOptimizedHiveOPDag(RelNode optimizedOptiqPlan) throws SemanticExcept
     }

     ASTNode selExprList = qb.getParseInfo().getSelForClause(dest);
-    RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, qb, selExprList);
+    RowResolver rowResolver = createRowResolver(columns);
+    rowResolver = handleInsertStatementSpec(colList, dest, rowResolver, qb, selExprList);

     List<String> columnNames = new ArrayList<>();
     Map<String, ExprNodeDesc> colExprMap = new HashMap<>();
@@ -1602,11 +1601,23 @@
       columnNames.add(outputCol);
     }
     Operator<?> output = putOpInsertMap(OperatorFactory.getAndMakeChild(new SelectDesc(colList,
-        columnNames), new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
+        columnNames), new RowSchema(rowResolver.getColumnInfos()), input), rowResolver);
     output.setColumnExprMap(colExprMap);
     return output;
   }

+  private RowResolver createRowResolver(List<ColumnInfo> columnInfos) {
+    RowResolver rowResolver = new RowResolver();
+    int pos = 0;
+    for (ColumnInfo columnInfo : columnInfos) {
+      ColumnInfo newColumnInfo = new ColumnInfo(columnInfo);
+      newColumnInfo.setInternalName(HiveConf.getColumnInternalName(pos++));
+      rowResolver.put(newColumnInfo.getTabAlias(), newColumnInfo.getAlias(), newColumnInfo);
+    }
+
+    return rowResolver;
+  }
+
   /***
    * Unwraps Calcite Invocation exceptions coming meta data provider chain and
    * obtains the real cause.
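The createRowResolver helper added to CalcitePlanner above copies each incoming ColumnInfo and renumbers its internal name positionally before handleInsertStatementSpec consumes the resolver. A rough standalone sketch of that renumbering, assuming the ql and serde2 modules on the classpath (the demo class, the "t" table alias and the key/value columns are invented):

    import java.util.Arrays;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.exec.ColumnInfo;
    import org.apache.hadoop.hive.ql.parse.RowResolver;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class RowResolverRekeyDemo {
      public static void main(String[] args) throws Exception {
        // Two columns whose internal names do not follow the _colN scheme.
        ColumnInfo key = new ColumnInfo("key", TypeInfoFactory.stringTypeInfo, "t", false);
        key.setAlias("key");
        ColumnInfo value = new ColumnInfo("value", TypeInfoFactory.stringTypeInfo, "t", false);
        value.setAlias("value");

        RowResolver rowResolver = new RowResolver();
        int pos = 0;
        for (ColumnInfo columnInfo : Arrays.asList(key, value)) {
          ColumnInfo renamed = new ColumnInfo(columnInfo);  // copy keeps alias and type
          renamed.setInternalName(HiveConf.getColumnInternalName(pos++));
          rowResolver.put(renamed.getTabAlias(), renamed.getAlias(), renamed);
        }
        // Internal names are now _col0 and _col1; aliases stay key/value.
        System.out.println(rowResolver.getColumnInfos());
      }
    }
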
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 60bfba826d..da89d3e9ed 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -691,7 +691,7 @@ private void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias,
    * @param dest destination clause
    * @return true or false
    */
-  private boolean isInsertInto(QBParseInfo qbp, String dest) {
+  protected boolean isInsertInto(QBParseInfo qbp, String dest) {
     // get the destination and check if it is TABLE
     if(qbp == null || dest == null ) {
       return false;
diff --git ql/src/test/org/apache/hadoop/hive/ql/util/TestUpgradeToolRerturnPath.java ql/src/test/org/apache/hadoop/hive/ql/util/TestUpgradeToolRerturnPath.java
new file mode 100644
index 0000000000..1c1494fabf
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/util/TestUpgradeToolRerturnPath.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.Before;
+
+/**
+ * TestUpgradeToolRerturnPath.
+ */
+public class TestUpgradeToolRerturnPath extends TestUpgradeTool {
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP, true);
+  }
+}
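The isInsertInto visibility change lets CalcitePlanner, which extends SemanticAnalyzer, reuse the destination check in getOptimizedHiveOPDag, and the new test class reruns TestUpgradeTool with the CBO return path switched on, exercising the operator-translation changes above. The same switch can be flipped in any ad-hoc harness; a tiny sketch, assuming hive-common on the classpath (the demo class name is invented):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ReturnPathFlagDemo {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // hive.cbo.returnpath.hiveop is off by default; enabling it makes Hive
        // translate the optimized Calcite plan directly into Hive operators.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP, true);
        System.out.println(conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)); // true
      }
    }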