Index: ql/src/test/results/clientpositive/cbo_correctness.q.out
===================================================================
--- ql/src/test/results/clientpositive/cbo_correctness.q.out	(revision 1619294)
+++ ql/src/test/results/clientpositive/cbo_correctness.q.out	(working copy)
@@ -17849,3 +17849,33 @@
 1	2
 1	2
 1	12
+PREHOOK: query: -- 13. null expr in select list
+select null from t3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t3
+#### A masked pattern was here ####
+POSTHOOK: query: -- 13. null expr in select list
+select null from t3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t3
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
Index: ql/src/test/queries/clientpositive/cbo_correctness.q
===================================================================
--- ql/src/test/queries/clientpositive/cbo_correctness.q	(revision 1619294)
+++ ql/src/test/queries/clientpositive/cbo_correctness.q	(working copy)
@@ -215,4 +215,5 @@
 
 select * from (select key as a, c_int+1 as b, sum(c_int) as c from t1 where (t1.c_int + 1 >= 0) and (t1.c_int > 0 or t1.c_float >= 0) group by c_float, t1.c_int, key having t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by a+b desc, c asc) t1 left semi join (select key as p, c_int+1 as q, sum(c_int) as r from t2 where (t2.c_int + 1 >= 0) and (t2.c_int > 0 or t2.c_float >= 0) group by c_float, t2.c_int, key having t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by q+r/10 desc, p) t2 on t1.a=p left semi join t3 on t1.a=key where (b + 1 >= 0) and (b > 0 or a >= 0) group by a, c having a > 0 and (a >=1 or c >= 1) and (a + c) >= 0 order by c, a;
 select * from (select key as a, c_int+1 as b, sum(c_int) as c from t1 where (t1.c_int + 1 >= 0) and (t1.c_int > 0 or t1.c_float >= 0) group by c_float, t1.c_int, key having t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by a+b desc, c asc limit 5) t1 left semi join (select key as p, c_int+1 as q, sum(c_int) as r from t2 where (t2.c_int + 1 >= 0) and (t2.c_int > 0 or t2.c_float >= 0) group by c_float, t2.c_int, key having t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by q+r/10 desc, p limit 5) t2 on t1.a=p left semi join t3 on t1.a=key where (b + 1 >= 0) and (b > 0 or a >= 0) group by a, c having a > 0 and (a >=1 or c >= 1) and (a + c) >= 0 order by c, a;
-
+-- 13. null expr in select list
+select null from t3;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java	(revision 1619294)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java	(working copy)
@@ -123,8 +123,10 @@
     /*
      * 6. Project
      */
+    if (!select.getChildExps().isEmpty()) {
+
+    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
     int i = 0;
-    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
 
     for (RexNode r : select.getChildExps()) {
       ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
@@ -132,7 +134,13 @@
       b.add(selectExpr);
     }
     hiveAST.select = b.node();
+    } else {
+      // TODO: We should never be here, but we will be for "select null from t1".
+      // Once you figure out why, uncomment the following line:
+      // throw new IllegalStateException("why am I here?");
+    }
+
 
     /*
      * 7. Order Use in Order By from the block above. RelNode has no pointer to
      * parent hence we need to go top down; but OB at each block really belong
@@ -347,13 +355,13 @@
     ASTNode wRangeAst = null;
 
     ASTNode startAST = null;
-    RexWindowBound ub = (RexWindowBound) window.getUpperBound();
+    RexWindowBound ub = window.getUpperBound();
     if (ub != null) {
       startAST = getWindowBound(ub);
     }
 
     ASTNode endAST = null;
-    RexWindowBound lb = (RexWindowBound) window.getLowerBound();
+    RexWindowBound lb = window.getLowerBound();
     if (lb != null) {
       endAST = getWindowBound(lb);
     }
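The empty-projection guard above is the visible symptom of this bug: per the TODO, the Project that Optiq hands back for `select null from t1` arrives with no child expressions, so TOK_SELECT is only emitted when there is something to project. To inspect the AST shape the converter has to regenerate, the parse tree can be dumped directly. The probe below is a sketch, not part of the patch; the class name is made up, and it assumes only hive-exec on the classpath (ParseDriver and ASTNode.dump() are existing Hive APIs):

```java
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

// Hypothetical probe: print the parser's AST for the new qtest query to see
// where TOK_NULL sits under TOK_SELECT / TOK_SELEXPR.
public class NullSelectAstProbe {
  public static void main(String[] args) throws Exception {
    ASTNode ast = new ParseDriver().parse("select null from t3");
    System.out.println(ast.dump());
  }
}
```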
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java	(revision 1619293)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java	(working copy)
@@ -110,8 +110,7 @@
     switch (type.getPrimitiveCategory()) {
     case VOID:
-      // @todo: followup on VOID type in hive
-      convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
+      convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
       break;
     case BOOLEAN:
       convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
       break;
@@ -135,6 +134,7 @@
       convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
       break;
     case STRING:
+      // TODO: should we pass -1 as the length, to distinguish STRING from VARCHAR on the way out?
      convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR, 1);
       break;
     case DATE:
@@ -163,6 +163,10 @@
       break;
     }
 
+    if (null == convertedType) {
+      throw new RuntimeException("Unsupported Type : " + type.getTypeName());
+    }
+
     return convertedType;
   }
 
@@ -184,6 +188,7 @@
     List<RelDataType> fTypes = Lists.transform(
         structType.getAllStructFieldTypeInfos(),
         new Function<TypeInfo, RelDataType>() {
+          @Override
           public RelDataType apply(TypeInfo tI) {
             return convert(tI, dtFactory);
           }
@@ -214,6 +219,7 @@
     List<TypeInfo> fTypes = Lists.transform(
         rType.getFieldList(),
         new Function<RelDataTypeField, TypeInfo>() {
+          @Override
           public TypeInfo apply(RelDataTypeField f) {
             return convert(f.getType());
           }
@@ -221,6 +227,7 @@
     List<String> fNames = Lists.transform(
         rType.getFieldList(),
         new Function<RelDataTypeField, String>() {
+          @Override
           public String apply(RelDataTypeField f) {
             return f.getName();
           }
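The TypeConverter change is the root of the fix: Hive's VOID category now maps to SqlTypeName.NULL instead of OTHER, and an unmapped primitive category now fails fast with a RuntimeException rather than silently returning null. A minimal sketch of the intended mapping follows, under stated assumptions: the wrapper class is hypothetical, and the type factory is expected to come from the planner (e.g. cluster.getRexBuilder().getTypeFactory(), as the RexNodeConverter hunk below does):

```java
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.TypeConverter;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.eigenbase.reltype.RelDataType;
import org.eigenbase.reltype.RelDataTypeFactory;
import org.eigenbase.sql.type.SqlTypeName;

// Hypothetical check, not part of the patch: with the fix applied, Hive's VOID
// type should convert to Optiq's NULL type rather than OTHER.
public class VoidTypeMappingCheck {
  static boolean voidMapsToNull(RelDataTypeFactory dtFactory) {
    RelDataType t = TypeConverter.convert(TypeInfoFactory.voidTypeInfo, dtFactory);
    return t.getSqlTypeName() == SqlTypeName.NULL;
  }
}
```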
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java	(revision 1619293)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java	(working copy)
@@ -15,11 +15,11 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseNumeric;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
@@ -40,7 +40,6 @@
 import org.eigenbase.rex.RexUtil;
 import org.eigenbase.sql.SqlOperator;
 import org.eigenbase.sql.fun.SqlCastFunction;
-import org.eigenbase.sql.type.SqlTypeName;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableList.Builder;
@@ -81,6 +80,10 @@
   }
 
   public RexNode convert(ExprNodeDesc expr) throws SemanticException {
+    if (expr instanceof ExprNodeNullDesc) {
+      return m_cluster.getRexBuilder().makeNullLiteral(TypeConverter.convert(
+          expr.getTypeInfo(), m_cluster.getRexBuilder().getTypeFactory()).getSqlTypeName());
+    }
     if (expr instanceof ExprNodeGenericFuncDesc) {
       return convert((ExprNodeGenericFuncDesc) expr);
     } else if (expr instanceof ExprNodeConstantDesc) {
@@ -90,8 +93,7 @@
     } else {
       throw new RuntimeException("Unsupported Expression");
     }
-    // TODO: handle a) ExprNodeNullDesc b) ExprNodeFieldDesc c)
-    // ExprNodeColumnListDesc
+    // TODO: handle a) ExprNodeFieldDesc b) ExprNodeColumnListDesc
   }
 
   private RexNode convert(final ExprNodeGenericFuncDesc func) throws SemanticException {
@@ -230,9 +232,10 @@
     RelDataType optiqDataType = TypeConverter.convert(hiveType, dtFactory);
     PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
 
-    RexNode optiqLiteral = null;
+    Object value = literal.getValue();
+    RexNode optiqLiteral = null;
 
     // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
     switch (hiveTypeCategory) {
     case BOOLEAN:
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java	(revision 1619293)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java	(working copy)
@@ -142,6 +142,9 @@
       type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
       break;
+    case NULL:
+      type = HiveParser.TOK_NULL;
+      break;
 
     default:
       throw new RuntimeException("Unsupported Type: " + sqlType);
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 1619294)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -13147,14 +13147,8 @@
     }
     selectStar = selectStar && exprList.getChildCount() == posn + 1;
 
-    // 7. Replace NULL with CAST(NULL AS STRING)
     ArrayList<String> columnNames = new ArrayList<String>();
     for (int i = 0; i < col_list.size(); i++) {
-      // Replace NULL with CAST(NULL AS STRING)
-      if (col_list.get(i) instanceof ExprNodeNullDesc) {
-        col_list.set(i, new ExprNodeConstantDesc(
-            TypeInfoFactory.stringTypeInfo, null));
-      }
       columnNames.add(getColumnInternalName(i));
     }
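Taken together, the pieces restore a clean round trip: SemanticAnalyzer no longer rewrites NULL to CAST(NULL AS STRING), so an ExprNodeNullDesc now reaches RexNodeConverter, which turns it into a typed null literal via makeNullLiteral; on the way back out, ASTBuilder maps SqlTypeName.NULL to HiveParser.TOK_NULL. Below is a hedged sketch of the new branch in use; the helper class is made up, and `converter` is assumed to be a RexNodeConverter already wired to an Optiq cluster (only calls that appear in the patch are used):

```java
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.RexNodeConverter;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.eigenbase.rex.RexNode;
import org.eigenbase.sql.type.SqlTypeName;

// Hypothetical walk-through, not part of the patch.
public class NullLiteralRoundTrip {
  static RexNode convertNullLiteral(RexNodeConverter converter) throws SemanticException {
    // "select null" puts an ExprNodeNullDesc in the select list.
    RexNode lit = converter.convert(new ExprNodeNullDesc());
    // With the patch, the literal carries SqlTypeName.NULL (VOID on the Hive side),
    // which ASTBuilder can translate back into a TOK_NULL token.
    assert lit.getType().getSqlTypeName() == SqlTypeName.NULL;
    return lit;
  }
}
```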