diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ExprNodeConverter.java
index 62346ac..1e9018f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ExprNodeConverter.java
@@ -41,11 +41,10 @@ public class ExprNodeConverter extends RexVisitorImpl {
   RelDataType rType;
-  String tabAlias;
-  boolean partitioningExpr;
+  String tabAlias;
+  boolean partitioningExpr;

-  public ExprNodeConverter(String tabAlias, RelDataType rType,
-      boolean partitioningExpr) {
+  public ExprNodeConverter(String tabAlias, RelDataType rType, boolean partitioningExpr) {
     super(true);
     /*
      * hb: 6/25/14 for now we only support expressions that only contain
@@ -63,8 +62,8 @@ public ExprNodeConverter(String tabAlias, RelDataType rType,
   @Override
   public ExprNodeDesc visitInputRef(RexInputRef inputRef) {
     RelDataTypeField f = rType.getFieldList().get(inputRef.getIndex());
-    return new ExprNodeColumnDesc(TypeConverter.convert(f.getType()),
-        f.getName(), tabAlias, partitioningExpr);
+    return new ExprNodeColumnDesc(TypeConverter.convert(f.getType()), f.getName(), tabAlias,
+        partitioningExpr);
   }

   @Override
@@ -80,59 +79,53 @@ public ExprNodeDesc visitCall(RexCall call) {
     }

     return new ExprNodeGenericFuncDesc(TypeConverter.convert(call.getType()),
-        SqlFunctionConverter.getHiveUDF(call.getOperator()), args);
+        SqlFunctionConverter.getHiveUDF(call.getOperator(), call.getType()), args);
   }

   @Override
   public ExprNodeDesc visitLiteral(RexLiteral literal) {
     RelDataType lType = literal.getType();

-    switch (literal.getTypeName()) {
+    switch (literal.getType().getSqlTypeName()) {
    case BOOLEAN:
-      return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.valueOf(RexLiteral
+          .booleanValue(literal)));
    case TINYINT:
-      return new ExprNodeConstantDesc(TypeInfoFactory.byteTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.byteTypeInfo, Byte.valueOf(((Number) literal
+          .getValue3()).byteValue()));
    case SMALLINT:
      return new ExprNodeConstantDesc(TypeInfoFactory.shortTypeInfo,
-          literal.getValue3());
+          Short.valueOf(((Number) literal.getValue3()).shortValue()));
    case INTEGER:
      return new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,
-          literal.getValue3());
+          Integer.valueOf(((Number) literal.getValue3()).intValue()));
    case BIGINT:
-      return new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long.valueOf(((Number) literal
+          .getValue3()).longValue()));
    case FLOAT:
      return new ExprNodeConstantDesc(TypeInfoFactory.floatTypeInfo,
-          literal.getValue3());
+          Float.valueOf(((Number) literal.getValue3()).floatValue()));
    case DOUBLE:
      return new ExprNodeConstantDesc(TypeInfoFactory.doubleTypeInfo,
-          literal.getValue3());
+          Double.valueOf(((Number) literal.getValue3()).doubleValue()));
    case DATE:
-      return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo, literal.getValue3());
    case TIMESTAMP:
-      return new ExprNodeConstantDesc(TypeInfoFactory.timestampTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.timestampTypeInfo, literal.getValue3());
    case BINARY:
-      return new ExprNodeConstantDesc(TypeInfoFactory.binaryTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.binaryTypeInfo, literal.getValue3());
    case DECIMAL:
-      return new ExprNodeConstantDesc(TypeInfoFactory.getDecimalTypeInfo(
-          lType.getPrecision(), lType.getScale()), literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.getDecimalTypeInfo(lType.getPrecision(),
+          lType.getScale()), literal.getValue3());
    case VARCHAR:
-      return new ExprNodeConstantDesc(TypeInfoFactory.getVarcharTypeInfo(lType
-          .getPrecision()),
-          new HiveVarchar((String)literal.getValue3(), lType.getPrecision()));
+      return new ExprNodeConstantDesc(TypeInfoFactory.getVarcharTypeInfo(lType.getPrecision()),
+          new HiveVarchar((String) literal.getValue3(), lType.getPrecision()));
    case CHAR:
-      return new ExprNodeConstantDesc(TypeInfoFactory.getCharTypeInfo(lType
-          .getPrecision()),
-          new HiveChar((String)literal.getValue3(), lType.getPrecision()));
+      return new ExprNodeConstantDesc(TypeInfoFactory.getCharTypeInfo(lType.getPrecision()),
+          new HiveChar((String) literal.getValue3(), lType.getPrecision()));
    case OTHER:
    default:
-      return new ExprNodeConstantDesc(TypeInfoFactory.voidTypeInfo,
-          literal.getValue3());
+      return new ExprNodeConstantDesc(TypeInfoFactory.voidTypeInfo, literal.getValue3());
    }
  }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
index 01646e1..eea796d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
@@ -10,6 +10,8 @@
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.eigenbase.reltype.RelDataType;
 import org.eigenbase.reltype.RelDataTypeFactory;
 import org.eigenbase.sql.SqlAggFunction;
@@ -49,12 +51,54 @@ public static SqlOperator getOptiqOperator(GenericUDF hiveUDF,
     return getOptiqFn(getName(hiveUDF), optiqArgTypes, retType);
   }

-  public static GenericUDF getHiveUDF(SqlOperator op) {
+  public static GenericUDF getHiveUDF(SqlOperator op, RelDataType dt) {
     String name = reverseOperatorMap.get(op);
     FunctionInfo hFn = name != null ? FunctionRegistry.getFunctionInfo(name) : null;
+    if (hFn == null)
+      hFn = handleExplicitCast(op, dt);
     return hFn == null ? null : hFn.getGenericUDF();
   }

+  private static FunctionInfo handleExplicitCast(SqlOperator op, RelDataType dt) {
+    FunctionInfo castUDF = null;
+
+    if (op.kind == SqlKind.CAST) {
+      TypeInfo castType = TypeConverter.convert(dt);
+
+      if (castType.equals(TypeInfoFactory.byteTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("tinyint");
+      } else if (castType.equals(TypeInfoFactory.charTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("char");
+      } else if (castType.equals(TypeInfoFactory.varcharTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("varchar");
+      } else if (castType.equals(TypeInfoFactory.stringTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("string");
+      } else if (castType.equals(TypeInfoFactory.booleanTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("boolean");
+      } else if (castType.equals(TypeInfoFactory.shortTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("smallint");
+      } else if (castType.equals(TypeInfoFactory.intTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("int");
+      } else if (castType.equals(TypeInfoFactory.longTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("bigint");
+      } else if (castType.equals(TypeInfoFactory.floatTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("float");
+      } else if (castType.equals(TypeInfoFactory.doubleTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("double");
+      } else if (castType.equals(TypeInfoFactory.timestampTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("timestamp");
+      } else if (castType.equals(TypeInfoFactory.dateTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("datetime");
+      } else if (castType.equals(TypeInfoFactory.decimalTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("decimal");
+      } else if (castType.equals(TypeInfoFactory.binaryTypeInfo)) {
+        castUDF = FunctionRegistry.getFunctionInfo("binary");
+      }
+    }
+
+    return castUDF;
+  }
+
   // TODO: 1) handle Agg Func Name translation 2) is it correct to add func args
   // as child of func?
   public static ASTNode buildAST(SqlOperator op, List children) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index 3c1fb0d..e8081e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -162,7 +162,7 @@ public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
     LOG.trace("Started pruning partiton");
     LOG.trace("dbname = " + tab.getDbName());
     LOG.trace("tabname = " + tab.getTableName());
-    LOG.trace("prune Expression = " + prunerExpr);
+//    LOG.trace("prune Expression = " + prunerExpr);
     String key = tab.getDbName() + "." + tab.getTableName() + ";";