diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
index ff0ddc8..f08321c 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
@@ -27,7 +27,8 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.NullWritable;
 
 /**
  * ExprNodeEvaluatorFactory.
@@ -55,11 +56,6 @@ public static ExprNodeEvaluator get(ExprNodeDesc desc) throws HiveException {
     if (desc instanceof ExprNodeFieldDesc) {
       return new ExprNodeFieldEvaluator((ExprNodeFieldDesc) desc);
     }
-    // Null node, a constant node with value NULL and no type information
-    if (desc instanceof ExprNodeNullDesc) {
-      return new ExprNodeNullEvaluator((ExprNodeNullDesc) desc);
-    }
-
     throw new RuntimeException(
         "Cannot find ExprNodeEvaluator for the exprNodeDesc = " + desc);
   }
@@ -114,14 +110,14 @@ private static ExprNodeEvaluator iterate(ExprNodeEvaluator eval, EvaluatorContex
 
   private static class EvaluatorContext {
 
-    private final Map<ExprNodeDesc.ExprNodeDescEqualityWrapper, ExprNodeEvaluator> cached = 
+    private final Map<ExprNodeDesc.ExprNodeDescEqualityWrapper, ExprNodeEvaluator> cached =
        new HashMap<ExprNodeDesc.ExprNodeDescEqualityWrapper, ExprNodeEvaluator>();
 
     private boolean hasReference;
 
     public ExprNodeEvaluator getEvaluated(ExprNodeEvaluator eval) {
-      ExprNodeDesc.ExprNodeDescEqualityWrapper key = 
-          new ExprNodeDesc.ExprNodeDescEqualityWrapper(eval.expr);
+      ExprNodeDesc.ExprNodeDescEqualityWrapper key =
+          new ExprNodeDesc.ExprNodeDescEqualityWrapper(eval.expr);
       ExprNodeEvaluator prev = cached.get(key);
       if (prev == null) {
         cached.put(key, eval);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
index b695bef..b09b706 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
@@ -78,9 +78,10 @@ public void prepare(int version) throws HiveException {
     }
 
     public boolean needsPrepare() {
-      return !(eval instanceof ExprNodeConstantEvaluator || eval instanceof ExprNodeNullEvaluator);
+      return !(eval instanceof ExprNodeConstantEvaluator);
     }
 
+    @Override
     public Object get() throws HiveException {
       if (!evaluated) {
         obj = eval.evaluate(rowObject, version);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java
deleted file mode 100644
index 3aaf17c..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-
-// This function will not be used currently, since the function expressions
-// change the void to the first matching argument
-/**
- * ExprNodeNullEvaluator.
- *
- */
-public class ExprNodeNullEvaluator extends ExprNodeEvaluator<ExprNodeNullDesc> {
-
-  public ExprNodeNullEvaluator(ExprNodeNullDesc expr) {
-    super(expr);
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
-    return outputOI = PrimitiveObjectInspectorFactory.writableVoidObjectInspector;
-  }
-
-  @Override
-  protected Object _evaluate(Object row, int version) throws HiveException {
-    return null;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 14a1059..ac18069 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -90,7 +90,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
@@ -281,7 +280,7 @@ protected int getInputColumnIndex(String name) throws HiveException {
       throw new HiveException("Null column name");
     }
     if (!projectionColumnMap.containsKey(name)) {
-      throw new HiveException(String.format("The column %s is not in the vectorization context column map %s.", 
+      throw new HiveException(String.format("The column %s is not in the vectorization context column map %s.",
           name, projectionColumnMap.toString()));
     }
     return projectionColumnMap.get(name);
@@ -424,7 +423,7 @@ public VectorExpression getVectorExpression(ExprNodeDesc exprDesc, Mode mode) th
         ve = getGenericUdfVectorExpression(expr.getGenericUDF(),
             childExpressions, mode, exprDesc.getTypeInfo());
       }
-    } else if (exprDesc instanceof ExprNodeNullDesc) {
+    } else if (exprDesc instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)exprDesc).getValue()) {
       ve = getConstantVectorExpression(null, exprDesc.getTypeInfo(), mode);
     } else if (exprDesc instanceof ExprNodeConstantDesc) {
       ve = getConstantVectorExpression(((ExprNodeConstantDesc) exprDesc).getValue(), exprDesc.getTypeInfo(),
@@ -1321,10 +1320,10 @@ private HiveDecimal castConstantToDecimal(Object scalar, TypeInfo type) throws H
     HiveDecimal rawDecimal;
     switch (ptinfo.getPrimitiveCategory()) {
     case FLOAT:
-      rawDecimal = HiveDecimal.create(String.valueOf((Float) scalar));
+      rawDecimal = HiveDecimal.create(String.valueOf(scalar));
       break;
     case DOUBLE:
-      rawDecimal = HiveDecimal.create(String.valueOf((Double) scalar));
+      rawDecimal = HiveDecimal.create(String.valueOf(scalar));
       break;
     case BYTE:
       rawDecimal = HiveDecimal.create((Byte) scalar);
@@ -1419,7 +1418,7 @@ private VectorExpression getCastToDecimal(List<ExprNodeDesc> childExpr, TypeInfo
       Object constantValue = ((ExprNodeConstantDesc) child).getValue();
       HiveDecimal decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
       return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION);
-    } else if (child instanceof ExprNodeNullDesc) {
+    } else if (child instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)child).getValue()) {
       return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
     }
     if (isIntFamily(inputType)) {
@@ -1446,7 +1445,7 @@ private VectorExpression getCastToString(List<ExprNodeDesc> childExpr, TypeInfo
       Object constantValue = ((ExprNodeConstantDesc) child).getValue();
       String strValue = castConstantToString(constantValue, child.getTypeInfo());
       return getConstantVectorExpression(strValue, returnType, Mode.PROJECTION);
-    } else if (child instanceof ExprNodeNullDesc) {
+    } else if (child instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)child).getValue()) {
       return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
     }
     if (inputType.equals("boolean")) {
@@ -1488,7 +1487,7 @@ private VectorExpression getCastToChar(List<ExprNodeDesc> childExpr, TypeInfo re
     return createVectorExpression(CastStringGroupToChar.class, childExpr, Mode.PROJECTION, returnType);
   }
 
-  /* 
+  /*
    * Timestamp, float, and double types are handled by the legacy code path. See isLegacyPathUDF.
    */
 
@@ -1533,7 +1532,7 @@ private VectorExpression getCastToDoubleExpression(Class<?> udf, List<ExprNodeDesc> childExpr)
       // Don't do constant folding here. Wait until the optimizer is changed to do it.
       // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
       return null;
-    } else if (child instanceof ExprNodeNullDesc) {
+    } else if (child instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)child).getValue()) {
       return getConstantVectorExpression(null, TypeInfoFactory.booleanTypeInfo, Mode.PROJECTION);
     }
     // Long and double are handled using descriptors, string needs to be specially handled.
@@ -1589,7 +1588,7 @@ private VectorExpression getCastToLongExpression(List<ExprNodeDesc> childExpr)
       Object constantValue = ((ExprNodeConstantDesc) child).getValue();
       Long longValue = castConstantToLong(constantValue, child.getTypeInfo());
       return getConstantVectorExpression(longValue, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
-    } else if (child instanceof ExprNodeNullDesc) {
+    } else if (child instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)child).getValue()) {
       return getConstantVectorExpression(null, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
     }
     // Float family, timestamp are handled via descriptor based lookup, int family needs
@@ -1836,7 +1835,7 @@ private Object getScalarValue(ExprNodeConstantDesc constDesc)
         return 0;
       }
     } else if (decimalTypePattern.matcher(constDesc.getTypeString()).matches()) {
-      return (HiveDecimal) constDesc.getValue();
+      return constDesc.getValue();
     } else {
       return constDesc.getValue();
     }
@@ -1976,7 +1975,7 @@ static String getNormalizedName(String hiveTypeName) {
       return "None";
     }
   }
-  
+
   static String getUndecoratedName(String hiveTypeName) {
     VectorExpressionDescriptor.ArgumentType argType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(hiveTypeName);
     switch (argType) {
@@ -2005,7 +2004,7 @@ static String getUndecoratedName(String hiveTypeName) {
   }
 
   // TODO: When we support vectorized STRUCTs and can handle more in the reduce-side (MERGEPARTIAL):
-  // TODO: Write reduce-side versions of AVG. Currently, only map-side (HASH) versions are in table.  
+  // TODO: Write reduce-side versions of AVG. Currently, only map-side (HASH) versions are in table.
   // TODO: And, investigate if different reduce-side versions are needed for var* and std*, or if map-side aggregate can be used..  Right now they are conservatively
   //       marked map-side (HASH).
   static ArrayList<AggregateDefinition> aggregatesDefinition = new ArrayList<AggregateDefinition>() {{
@@ -2115,6 +2114,7 @@ public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc, b
     return map;
   }
 
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(32);
     sb.append("Context key ").append(getFileKey()).append(", ");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
index b0768f2..5731d6a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -53,7 +53,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
@@ -81,7 +80,9 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.NullWritable;
 
 import com.google.common.collect.ImmutableSet;
 
@@ -133,7 +134,7 @@ public static ColumnInfo resolveColumn(RowSchema rs,
    * @return cast constant, or null if the type cast failed.
    */
   private static ExprNodeConstantDesc typeCast(ExprNodeDesc desc, TypeInfo ti) {
-    if (desc instanceof ExprNodeNullDesc) {
+    if (desc instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)desc).getValue()) {
       return null;
     }
     if (!(ti instanceof PrimitiveTypeInfo) || !(desc.getTypeInfo() instanceof PrimitiveTypeInfo)) {
@@ -343,7 +344,7 @@ private static void propagate(GenericUDF udf, List<ExprNodeDesc> newExprs, RowSc
       ExprNodeColumnDesc c = (ExprNodeColumnDesc) operand;
       ColumnInfo ci = resolveColumn(rs, c);
       if (ci != null) {
-        constants.put(ci, new ExprNodeNullDesc());
+        constants.put(ci, new ExprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null));
       }
     }
   }
@@ -487,7 +488,7 @@ private static ExprNodeDesc evaluateFunction(GenericUDF udf, List<ExprNodeDesc>
 
         argois[i] = ObjectInspectorUtils.getConstantObjectInspector(constant.getWritableObjectInspector(),
             writableValue);
-      } else if (desc instanceof ExprNodeNullDesc) {
+      } else if (desc instanceof ExprNodeConstantDesc && null == ((ExprNodeConstantDesc)desc).getValue()) {
 
         // FIXME: add null support.
         return null;
@@ -511,7 +512,7 @@ private static ExprNodeDesc evaluateFunction(GenericUDF udf, List<ExprNodeDesc>
     Object o = udf.evaluate(arguments);
     LOG.debug(udf.getClass().getName() + "(" + exprs + ")=" + o);
     if (o == null) {
-      return new ExprNodeNullDesc();
+      return new ExprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
     }
     Class<?> clz = o.getClass();
     if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) {
@@ -941,7 +942,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object..
       List<ExprNodeDesc> newExprs = new ArrayList<ExprNodeDesc>();
       for (ExprNodeDesc expr : exprs) {
         ExprNodeDesc newExpr = foldExpr(expr, constants, cppCtx, op, tag, false);
-        if (newExpr instanceof ExprNodeConstantDesc || newExpr instanceof ExprNodeNullDesc) {
+        if (newExpr instanceof ExprNodeConstantDesc) {
           LOG.info("expr " + newExpr + " fold from " + expr + " is removed.");
           continue;
         }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
index 1e47fcb..af54286 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
@@ -55,7 +55,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
@@ -340,9 +339,6 @@ protected GroupByOptimizerSortMatch checkSortGroupBy(Stack<Node> stack,
           } else {
             tableColsMapping.remove(outputColumnName);
 
-            if (selectCol instanceof ExprNodeNullDesc) {
-              newConstantCols.add(outputColumnName);
-            }
             if (selectCol instanceof ExprNodeConstantDesc) {
               // Lets see if this constant was folded because of optimization.
              String origCol = ((ExprNodeConstantDesc) selectCol).getFoldedFromCol();
@@ -380,8 +376,7 @@ protected GroupByOptimizerSortMatch checkSortGroupBy(Stack<Node> stack,
             }
           }
           // Constants and nulls are OK
-          else if ((expr instanceof ExprNodeConstantDesc) ||
-              (expr instanceof ExprNodeNullDesc)) {
+          else if (expr instanceof ExprNodeConstantDesc) {
             continue;
           } else {
             return GroupByOptimizerSortMatch.NO_MATCH;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java
index e633fdc..306e714 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 
 /**
  * Expression processor factory for pruning. Each processor tries to
@@ -182,8 +181,6 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
       if (nd instanceof ExprNodeConstantDesc) {
         return ((ExprNodeConstantDesc) nd).clone();
-      } else if (nd instanceof ExprNodeNullDesc) {
-        return ((ExprNodeNullDesc) nd).clone();
       }
 
       return new ExprNodeConstantDesc(((ExprNodeDesc)nd).getTypeInfo(), null);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
index 0328007..317454d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
@@ -64,7 +64,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -262,7 +261,6 @@ private boolean checkExpressions(SelectOperator op) {
 
   private boolean checkExpression(ExprNodeDesc expr) {
     if (expr instanceof ExprNodeConstantDesc ||
-        expr instanceof ExprNodeNullDesc||
         expr instanceof ExprNodeColumnDesc) {
       return true;
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index abd7afd..3d05161 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -63,7 +63,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
@@ -124,9 +123,7 @@ public RexNodeConverter(RelOptCluster cluster, List<InputCtx> inpCtxLst, boolean
   }
 
   public RexNode convert(ExprNodeDesc expr) throws SemanticException {
-    if (expr instanceof ExprNodeNullDesc) {
-      return createNullLiteral(expr);
-    } else if (expr instanceof ExprNodeGenericFuncDesc) {
+    if (expr instanceof ExprNodeGenericFuncDesc) {
      return convert((ExprNodeGenericFuncDesc) expr);
     } else if (expr instanceof ExprNodeConstantDesc) {
       return convert((ExprNodeConstantDesc) expr);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
index 86d221d..c930b80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
@@ -49,7 +49,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
@@ -136,7 +135,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
-      assert (nd instanceof ExprNodeConstantDesc || nd instanceof ExprNodeNullDesc);
+      assert (nd instanceof ExprNodeConstantDesc);
 
       // Create a dependency that has no basecols
       Dependency dep = new Dependency();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
index cbd4e6c..d5102bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
@@ -46,7 +46,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -392,7 +391,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
-      if (nd instanceof ExprNodeConstantDesc || nd instanceof ExprNodeNullDesc) {
+      if (nd instanceof ExprNodeConstantDesc) {
         return new NodeInfoWrapper(WalkState.CONSTANT, null, (ExprNodeDesc) nd);
       }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
index 01398f0..cc0a7d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
@@ -45,7 +45,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
 
@@ -291,8 +290,7 @@ private static boolean genColNameMap(
         continue;
       }
 
-      if ((colExpr instanceof ExprNodeConstantDesc) ||
-          (colExpr instanceof ExprNodeNullDesc)) {
+      if (colExpr instanceof ExprNodeConstantDesc) {
         currColNames.remove(outputColName);
         continue;
       } else if (colExpr instanceof ExprNodeColumnDesc) {
@@ -317,7 +315,7 @@ private static boolean genColNameMap(
     if (expr instanceof ExprNodeColumnDesc) {
       ExprNodeColumnDesc colExpr = (ExprNodeColumnDesc)expr;
       colList.add(colExpr.getColumn());
-    } else if (expr instanceof ExprNodeConstantDesc || expr instanceof ExprNodeNullDesc) {
+    } else if (expr instanceof ExprNodeConstantDesc) {
       continue;
     } else {
       return null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 79d38bc..857410d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -59,7 +59,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
@@ -78,6 +77,7 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hive.common.util.DateUtils;
 
 import com.google.common.collect.Lists;
@@ -240,7 +240,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         return desc;
       }
 
-      return new ExprNodeNullDesc();
+      return new ExprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
     }
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
index b15df0f..89a175e 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
@@ -84,7 +84,6 @@ public Object getValue() {
   @Override
   public ConstantObjectInspector getWritableObjectInspector() {
     PrimitiveTypeInfo pti = (PrimitiveTypeInfo) getTypeInfo();
-    PrimitiveCategory pc = pti.getPrimitiveCategory();
     // Convert from Java to Writable
     Object writableValue = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(pti).getPrimitiveWritableObject(
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java
deleted file mode 100644
index 25b16da..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.NullWritable;
-
-/**
- * ExprNodeNullDesc.
- *
- */
-public class ExprNodeNullDesc extends ExprNodeDesc implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  public ExprNodeNullDesc() {
-    super(TypeInfoFactory
-        .getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class));
-  }
-
-  public Object getValue() {
-    return null;
-  }
-
-  @Override
-  public String toString() {
-    return "null";
-  }
-
-  @Override
-  public String getExprString() {
-    return "null";
-  }
-
-  @Override
-  public ExprNodeDesc clone() {
-    return new ExprNodeNullDesc();
-  }
-
-  @Override
-  public boolean isSame(Object o) {
-    if (!(o instanceof ExprNodeNullDesc)) {
-      return false;
-    }
-    if (!typeInfo.equals(((ExprNodeNullDesc) o).getTypeInfo())) {
-      return false;
-    }
-
-    return true;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index 508d880..10871e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -50,7 +50,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.Statistics;
 import org.apache.hadoop.hive.ql.plan.Statistics.State;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
@@ -1151,13 +1150,6 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis
       colType = engfd.getTypeString();
       countDistincts = numRows;
       oi = engfd.getWritableObjectInspector();
-    } else if (end instanceof ExprNodeNullDesc) {
-
-      // null projection
-      ExprNodeNullDesc ennd = (ExprNodeNullDesc) end;
-      colName = ennd.getName();
-      colType = "null";
-      numNulls = numRows;
     } else if (end instanceof ExprNodeColumnListDesc) {
 
       // column list
@@ -1473,7 +1465,7 @@ public static long safeMult(long a, double b) {
     double result = a * b;
     return (result > Long.MAX_VALUE) ? Long.MAX_VALUE : (long)result;
   }
-  
+
   /** Bounded addition - overflows become MAX_VALUE */
   public static long safeAdd(long a, long b) {
     try {
@@ -1482,7 +1474,7 @@ public static long safeAdd(long a, long b) {
       return Long.MAX_VALUE;
     }
   }
-  
+
   /** Bounded multiplication - overflows become MAX_VALUE */
  public static long safeMult(long a, long b) {
     try {
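
The substitution the patch applies is uniform across every touched call site: each `new ExprNodeNullDesc()` becomes an `ExprNodeConstantDesc` of void type holding a null value, and each `instanceof ExprNodeNullDesc` test becomes a constant-with-null-value test. A minimal sketch of both halves of that pattern follows; the class and method names (`NullLiteralSketch`, `nullConstant`, `isNullConstant`) are illustrative only and are not code from this patch.

```java
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.NullWritable;

public class NullLiteralSketch {

  // Replacement for "new ExprNodeNullDesc()": a constant descriptor of void
  // type (derived from NullWritable, as the deleted ExprNodeNullDesc did)
  // whose value is null.
  static ExprNodeConstantDesc nullConstant() {
    return new ExprNodeConstantDesc(
        TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
  }

  // Replacement for "desc instanceof ExprNodeNullDesc": a NULL literal is now
  // any constant descriptor whose payload is null.
  static boolean isNullConstant(ExprNodeDesc desc) {
    return desc instanceof ExprNodeConstantDesc
        && ((ExprNodeConstantDesc) desc).getValue() == null;
  }
}
```

Because this check matches any null-valued constant rather than a dedicated node class, branch ordering matters: the null-constant branch in `VectorizationContext.getVectorExpression` must stay ahead of the general `ExprNodeConstantDesc` branch, exactly as the hunk at line 424 places it.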