diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 9179efd..c859dcb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -1050,6 +1050,7 @@ public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc)
           aggClass.getConstructor(VectorExpression.class);
       VectorAggregateExpression aggExpr = ctor.newInstance(
           vectorParams.length > 0 ? vectorParams[0] : null);
+      aggExpr.init(desc);
       return aggExpr;
     }
     // TODO: change to 1.7 syntax when possible
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java
index e98fae3..c56cb11 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 public abstract class VectorAggregateExpression {
@@ -42,5 +43,7 @@ public abstract void aggregateInputSelection(VectorAggregationBufferRow[] aggreg
 
   public boolean hasVariableSize() {
     return false;
   }
+
+  public abstract void init(AggregationDesc desc) throws HiveException;
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
index 0aa8537..35b85d0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
@@ -24,10 +24,13 @@
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+
 
 /**
  * VectorUDAFCountLong. Vectorized implementation for COUNT aggregates.
@@ -236,7 +239,7 @@ public void reset(AggregationBuffer agg) throws HiveException {
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set (myagg.value);
@@ -258,5 +261,10 @@ public int getAggregationBufferFixedSize() {
         model.primitive1(),
         model.memoryAlign());
     }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
index 3961171..f725f27 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
@@ -23,10 +23,12 @@
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 
 /**
  * VectorUDAFCountStar. Vectorized implementation for COUNT(*) aggregates.
@@ -114,7 +116,7 @@ public void reset(AggregationBuffer agg) throws HiveException {
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set (myagg.value);
@@ -136,5 +138,10 @@ public int getAggregationBufferFixedSize() {
         model.primitive1(),
         model.memoryAlign());
     }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgDouble.java
index 27c69b2..cf009bf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -427,7 +429,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -450,5 +452,10 @@ public int getAggregationBufferFixedSize() {
         model.primitive2() * 2,
         model.memoryAlign());
     }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgLong.java
index e3b0406..4a4096e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFAvgLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -427,7 +429,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -450,5 +452,10 @@ public int getAggregationBufferFixedSize() {
         model.primitive2() * 2,
         model.memoryAlign());
     }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxDouble.java
index f22e912..4c95ec9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxDouble.java
@@ -25,13 +25,17 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.
     VectorAggregateExpression.AggregationBuffer;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -72,12 +76,17 @@ public int getVariableSize() {
     }
 
     private VectorExpression inputExpression;
-    private DoubleWritable result;
+    private VectorExpressionWriter resultWriter;
 
     public VectorUDAFMaxDouble(VectorExpression inputExpression) {
       super();
       this.inputExpression = inputExpression;
-      result = new DoubleWritable();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -405,17 +414,16 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
-        result.set(myagg.value);
-        return result;
+        return resultWriter.writeValue(myagg.value);
       }
     }
 
     @Override
     public ObjectInspector getOutputObjectInspector() {
-      return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+      return resultWriter.getObjectInspector();
     }
 
     @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxLong.java
index a86585f..a41b2b2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxLong.java
@@ -25,13 +25,17 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.
     VectorAggregateExpression.AggregationBuffer;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -72,12 +76,17 @@ public int getVariableSize() {
     }
 
     private VectorExpression inputExpression;
-    private LongWritable result;
+    private VectorExpressionWriter resultWriter;
 
     public VectorUDAFMaxLong(VectorExpression inputExpression) {
       super();
       this.inputExpression = inputExpression;
-      result = new LongWritable();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -405,17 +414,16 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
-        result.set(myagg.value);
-        return result;
+        return resultWriter.writeValue(myagg.value);
       }
     }
 
     @Override
     public ObjectInspector getOutputObjectInspector() {
-      return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+      return resultWriter.getObjectInspector();
     }
 
     @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxString.java
index 02d54a2..03b2262 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxString.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMaxString.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggreg
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,6 +40,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.NullWritable;
 
 /**
  * VectorUDAFMaxString. Vectorized implementation for MIN/MAX aggregates.
@@ -354,7 +356,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.bytes, 0, myagg.length);
@@ -382,5 +384,9 @@ public boolean hasVariableSize() {
       return true;
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinDouble.java
index 95d687f..2d96434 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinDouble.java
@@ -25,13 +25,17 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.
     VectorAggregateExpression.AggregationBuffer;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -72,12 +76,17 @@ public int getVariableSize() {
     }
 
     private VectorExpression inputExpression;
-    private DoubleWritable result;
+    private VectorExpressionWriter resultWriter;
 
     public VectorUDAFMinDouble(VectorExpression inputExpression) {
       super();
       this.inputExpression = inputExpression;
-      result = new DoubleWritable();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -405,17 +414,16 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
      }
       else {
-        result.set(myagg.value);
-        return result;
+        return resultWriter.writeValue(myagg.value);
      }
     }
 
     @Override
     public ObjectInspector getOutputObjectInspector() {
-      return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+      return resultWriter.getObjectInspector();
     }
 
     @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinLong.java
index 7dacf05..1fb1517 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinLong.java
@@ -25,13 +25,17 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.
     VectorAggregateExpression.AggregationBuffer;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -72,12 +76,17 @@ public int getVariableSize() {
     }
 
     private VectorExpression inputExpression;
-    private LongWritable result;
+    private VectorExpressionWriter resultWriter;
 
     public VectorUDAFMinLong(VectorExpression inputExpression) {
       super();
       this.inputExpression = inputExpression;
-      result = new LongWritable();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -405,17 +414,16 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
      }
       else {
-        result.set(myagg.value);
-        return result;
+        return resultWriter.writeValue(myagg.value);
      }
     }
 
     @Override
     public ObjectInspector getOutputObjectInspector() {
-      return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+      return resultWriter.getObjectInspector();
     }
 
     @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinString.java
index 5acbf8d..67ac6b6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinString.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFMinString.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggreg
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,6 +40,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.NullWritable;
 
 /**
  * VectorUDAFMinString. Vectorized implementation for MIN/MAX aggregates.
@@ -354,7 +356,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.bytes, 0, myagg.length);
@@ -382,5 +384,9 @@ public boolean hasVariableSize() {
       return true;
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopDouble.java
index af854df..b7df924 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopLong.java
index 31e8438..3504a51 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdPopLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampDouble.java
index 72a1610..63b85a1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampLong.java
index 6696ddf..8c57088 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFStdSampLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumDouble.java
index 64e379e..43cd34e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -398,7 +400,7 @@ public void reset(AggregationBuffer agg) throws HiveException {
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.sum);
@@ -419,6 +421,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumLong.java
index 0799570..2f5a89c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFSumLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -398,7 +400,7 @@ public void reset(AggregationBuffer agg) throws HiveException {
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.sum);
@@ -419,6 +421,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopDouble.java
index bae5e79..c7f9458 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopLong.java
index 042ba17..6988f14 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarPopLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampDouble.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampDouble.java
index 05449fb..93a5250 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampDouble.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampDouble.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampLong.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampLong.java
index fd4e322..6e3d4e6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampLong.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/VectorUDAFVarSampLong.java
@@ -29,8 +29,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public Object evaluateOutput(
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public int getAggregationBufferFixedSize() {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFAvg.txt ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFAvg.txt
index 12608c2..ee6a27c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFAvg.txt
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFAvg.txt
@@ -29,8 +29,10 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -427,7 +429,7 @@ public class extends VectorAggregateExpression {
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -450,5 +452,10 @@ public class extends VectorAggregateExpression {
         model.primitive2() * 2,
         model.memoryAlign());
     }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMax.txt ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMax.txt
index 0637f8f..40cd4e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMax.txt
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMax.txt
@@ -25,13 +25,17 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.
     VectorAggregateExpression.AggregationBuffer;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -72,12 +76,17 @@ public class extends VectorAggregateExpression {
     }
 
     private VectorExpression inputExpression;
-    private result;
+    private VectorExpressionWriter resultWriter;
 
     public (VectorExpression inputExpression) {
       super();
       this.inputExpression = inputExpression;
-      result = new ();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -405,17 +414,16 @@ public class extends VectorAggregateExpression {
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
-        result.set(myagg.value);
-        return result;
+        return resultWriter.writeValue(myagg.value);
       }
     }
 
     @Override
     public ObjectInspector getOutputObjectInspector() {
-      return ;
+      return resultWriter.getObjectInspector();
     }
 
     @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMaxString.txt ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMaxString.txt
index 43c1a04..028628a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMaxString.txt
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFMinMaxString.txt
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggreg
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspe
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.NullWritable;
 
 /**
  * . Vectorized implementation for MIN/MAX aggregates.
@@ -354,7 +356,7 @@ public class extends VectorAggregateExpression {
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.bytes, 0, myagg.length);
@@ -382,5 +384,9 @@ public class extends VectorAggregateExpression {
       return true;
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFSum.txt ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFSum.txt
index 6db3aa4..586628b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFSum.txt
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFSum.txt
@@ -29,8 +29,10 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -398,7 +400,7 @@ public class extends VectorAggregateExpression {
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         result.set(myagg.sum);
@@ -419,6 +421,11 @@ public class extends VectorAggregateExpression {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFVar.txt ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFVar.txt
index 96d0f40..0692808 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFVar.txt
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/templates/VectorUDAFVar.txt
@@ -29,8 +29,10 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -474,7 +476,7 @@ public class extends VectorAggregateExpression {
     AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
-        return null;
+        return NullWritable.get();
       }
       else {
         assert(0 < myagg.count);
@@ -499,5 +501,11 @@ public class extends VectorAggregateExpression {
         model.memoryAlign());
     }
 
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+
 }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
index 8dca09f..ba6e8c4 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
@@ -21,7 +21,6 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
 import java.lang.reflect.Constructor;
@@ -54,6 +53,7 @@
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.junit.Assert;
 import org.junit.Test;
@@ -1494,7 +1494,7 @@ public void validate(Object expected, Object result) {
       assertEquals(1, arr.length);
 
       if (expected == null) {
-        assertNull (arr[0]);
+        Assert.assertSame(NullWritable.get(), arr[0]);
       } else if (arr[0] instanceof LongWritable) {
         LongWritable lw = (LongWritable) arr[0];
         assertEquals((Long) expected, (Long) lw.get());
@@ -1523,7 +1523,7 @@ public void validate(Object expected, Object result) {
       assertEquals (1, arr.length);
 
       if (expected == null) {
-        assertNull (arr[0]);
+        Assert.assertSame(NullWritable.get(), arr[0]);
       } else {
         assertEquals (true, arr[0] instanceof Object[]);
         Object[] vals = (Object[]) arr[0];
@@ -1551,7 +1551,7 @@ public void validate(Object expected, Object result) {
       assertEquals (1, arr.length);
 
       if (expected == null) {
-        assertNull (arr[0]);
+        Assert.assertSame(NullWritable.get(), arr[0]);
       } else {
         assertEquals (true, arr[0] instanceof Object[]);
         Object[] vals = (Object[]) arr[0];
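The hunks above all apply the same two conventions: evaluateOutput() returns NullWritable.get() instead of a bare null, and the MIN/MAX aggregates obtain their output Writable and ObjectInspector from a VectorExpressionWriter built in the new init(AggregationDesc) hook rather than from a hardcoded LongWritable/DoubleWritable. The sketch below is not part of the patch; it is a minimal illustration of that pattern under stated assumptions: the class name HypotheticalMaxLong and the reduced method set are hypothetical, while genVectorExpressionWritable, writeValue, getObjectInspector, getParameters().get(0), and NullWritable.get() are the calls that appear in the hunks above.

```java
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.NullWritable;

// Hypothetical, trimmed-down aggregate; the real classes extend
// VectorAggregateExpression and implement its other abstract methods.
public class HypotheticalMaxLong {

  private final VectorExpression inputExpression;
  private VectorExpressionWriter resultWriter;  // replaces the hardcoded LongWritable result

  private long value;
  private boolean isNull = true;

  public HypotheticalMaxLong(VectorExpression inputExpression) {
    this.inputExpression = inputExpression;
  }

  // Called by VectorizationContext right after construction (aggExpr.init(desc)),
  // so the writer is derived from the aggregate's first parameter and the output
  // keeps that column's declared type.
  public void init(AggregationDesc desc) throws HiveException {
    resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
        desc.getParameters().get(0));
  }

  public Object evaluateOutput() throws HiveException {
    // NullWritable.get() instead of null, matching the change made in every
    // evaluateOutput() hunk above.
    return isNull ? NullWritable.get() : resultWriter.writeValue(value);
  }

  public ObjectInspector getOutputObjectInspector() {
    return resultWriter.getObjectInspector();
  }
}
```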