diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 00bf009..ec22f1a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -20,7 +20,6 @@
 import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
-import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.LinkedList;
 import java.util.List;
@@ -43,11 +42,9 @@
 import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.RexVisitor;
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.Schema;
@@ -139,29 +136,6 @@ public ExprNodeDesc visitCall(RexCall call) {
         && SqlTypeUtil.equalSansNullability(dTFactory, call.getType(), call.operands.get(0).getType())) {
       return args.get(0);
-    } else if (ASTConverter.isFlat(call)) {
-      // If Expr is flat (and[p,q,r,s] or[p,q,r,s]) then recursively build the
-      // exprnode
-      GenericUDF hiveUdf = SqlFunctionConverter.getHiveUDF(call.getOperator(), call.getType(), 2);
-      ArrayList tmpExprArgs = new ArrayList();
-      tmpExprArgs.addAll(args.subList(0, 2));
-      try {
-        gfDesc = ExprNodeGenericFuncDesc.newInstance(hiveUdf, tmpExprArgs);
-      } catch (UDFArgumentException e) {
-        LOG.error(e);
-        throw new RuntimeException(e);
-      }
-      for (int i = 2; i < call.operands.size(); i++) {
-        tmpExprArgs = new ArrayList();
-        tmpExprArgs.add(gfDesc);
-        tmpExprArgs.add(args.get(i));
-        try {
-          gfDesc = ExprNodeGenericFuncDesc.newInstance(hiveUdf, tmpExprArgs);
-        } catch (UDFArgumentException e) {
-          LOG.error(e);
-          throw new RuntimeException(e);
-        }
-      }
     } else {
       GenericUDF hiveUdf = SqlFunctionConverter.getHiveUDF(call.getOperator(), call.getType(), args.size());