diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 4027229..f26d1df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -109,6 +109,8 @@
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -170,6 +172,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -649,7 +652,54 @@ Operator getOptimizedHiveOPDag() throws SemanticException {
       conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")).convert(modifiedOptimizedOptiqPlan);
     RowResolver hiveRootRR = genRowResolver(hiveRoot, getQB());
     opParseCtx.put(hiveRoot, new OpParseContext(hiveRootRR));
-    return genFileSinkPlan(getQB().getParseInfo().getClauseNames().iterator().next(), getQB(), hiveRoot);
+    String dest = getQB().getParseInfo().getClauseNames().iterator().next();
+    // If the INSERT clause carries an explicit target column list (and this is not a
+    // CTAS, i.e. no table descriptor yet), align the plan's output with that list first.
+    if (getQB().getParseInfo().getDestSchemaForClause(dest) != null
+        && this.getQB().getTableDesc() == null) {
+      Operator selOp = handleInsertStatement(dest, hiveRoot, hiveRootRR, getQB());
+      return genFileSinkPlan(dest, getQB(), selOp);
+    } else {
+      return genFileSinkPlan(dest, getQB(), hiveRoot);
+    }
+  }
+
+  /**
+   * Wrapper around {@link SemanticAnalyzer#handleInsertStatementSpec} for the CBO path:
+   * places a SEL operator on top of the optimized operator tree so that its output
+   * columns line up with the target column list of the INSERT clause.
+   *
+   * @param dest name of the destination clause being processed
+   * @param input root operator of the optimized plan
+   * @param inputRR row resolver describing the output of {@code input}
+   * @param qb query block of the statement being analyzed
+   * @return the new SELECT operator, registered in the operator parse-context map
+   * @throws SemanticException if the insert spec cannot be reconciled with the plan
+   */
+  Operator handleInsertStatement(String dest, Operator input, RowResolver inputRR, QB qb)
+      throws SemanticException {
+    ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
+    ArrayList<ColumnInfo> columns = inputRR.getColumnInfos();
+    for (ColumnInfo col : columns) {
+      colList.add(new ExprNodeColumnDesc(col));
+    }
+    ASTNode selExprList = qb.getParseInfo().getSelForClause(dest);
+
+    // Rewrites colList in place to match the destination column list's order.
+    RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, inputRR, qb,
+        selExprList);
+
+    ArrayList<String> columnNames = new ArrayList<String>();
+    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
+    for (int i = 0; i < colList.size(); i++) {
+      String outputCol = getColumnInternalName(i);
+      colExprMap.put(outputCol, colList.get(i));
+      columnNames.add(outputCol);
+    }
+    Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(new SelectDesc(colList,
+        columnNames), new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
+    output.setColumnExprMap(colExprMap);
+    return output;
   }
 
   private RelNode introduceProjectIfNeeded(RelNode optimizedOptiqPlan)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index f05407d..2240653 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3882,7 +3882,7 @@ static boolean isRegex(String pattern, HiveConf conf) {
    * @see #handleInsertStatementSpecPhase1(ASTNode, QBParseInfo, org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.Phase1Ctx)
    * @throws SemanticException
    */
-  private RowResolver handleInsertStatementSpec(List col_list, String dest,
+  public RowResolver handleInsertStatementSpec(List col_list, String dest,
       RowResolver outputRR, RowResolver inputRR, QB qb, ASTNode selExprList) throws SemanticException {
     //(z,x)