From 8c1276dd8b2d9fe4b331c090c71c6206fcb5cc91 Mon Sep 17 00:00:00 2001
From: Deepesh Khandelwal
Date: Sat, 12 Jul 2014 13:40:11 -0700
Subject: [PATCH] HIVE-7054: Support ELT UDF in vectorized mode

---
 .../hive/ql/exec/vector/VectorizationContext.java  | 136 +++++++++---------
 .../hive/ql/exec/vector/expressions/VectorElt.java | 137 ++++++++++++++++++
 .../hive/ql/optimizer/physical/Vectorizer.java     |   1 +
 ql/src/test/queries/clientpositive/vector_elt.q    |  34 +++++
 .../test/results/clientpositive/vector_elt.q.out   | 154 +++++++++++++++++++++
 5 files changed, 399 insertions(+), 63 deletions(-)
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorElt.java
 create mode 100644 ql/src/test/queries/clientpositive/vector_elt.q
 create mode 100644 ql/src/test/results/clientpositive/vector_elt.q.out

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 9772b4d..09a9479 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * Context class for vectorization execution.
@@ -392,21 +393,23 @@ private TypeInfo getCommonTypeForChildExpressions(GenericUDF genericUdf, List
     List<ExprNodeDesc> childrenWithCasts = new ArrayList<ExprNodeDesc>();
-    boolean atleastOneCastNeeded = false;
-    for (ExprNodeDesc child : children) {
-      ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
-      if (castExpression != null) {
-        atleastOneCastNeeded = true;
-        childrenWithCasts.add(castExpression);
-      } else {
-        childrenWithCasts.add(child);
+    if (genericUDF instanceof GenericUDFElt) {
+      int i = 0;
+      for (ExprNodeDesc child : children) {
+        TypeInfo castType = commonType;
+        if (i++ == 0) {
+          castType = isIntFamily(child.getTypeString()) ? child.getTypeInfo() : TypeInfoFactory.intTypeInfo;
+        }
+        ExprNodeDesc desc = getImplicitCastExpression(castType, child, genericUDF);
+        childrenWithCasts.add(desc);
       }
-    }
-    if (atleastOneCastNeeded) {
-      return childrenWithCasts;
     } else {
-      return children;
+      for (ExprNodeDesc child : children) {
+        ExprNodeDesc desc = getImplicitCastExpression(commonType, child, genericUDF);
+        childrenWithCasts.add(desc);
+      }
     }
+    return childrenWithCasts;
   }
 
   private boolean isExcludedFromCast(GenericUDF genericUDF) {
@@ -439,67 +442,43 @@ private TypeInfo updatePrecision(TypeInfo inputTypeInfo, DecimalTypeInfo returnT
     return new DecimalTypeInfo(precision, scale);
   }
 
+  private int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
+    if (isFloatFamily(typeInfo.getTypeName())) {
+      return HiveDecimal.MAX_PRECISION;
+    }
+    return HiveDecimalUtils.getPrecisionForType(typeInfo);
+  }
+
   /**
-   * The GenericUDFs might need their children output to be cast to the given castType.
-   * This method returns a cast expression that would achieve the required casting.
+   * This method returns a cast expression for a given castType wrapping the childExpression.
    */
-  private ExprNodeDesc getImplicitCastExpression(GenericUDF udf, ExprNodeDesc child, TypeInfo castType)
+  private ExprNodeDesc getImplicitCastExpression(TypeInfo castType, ExprNodeDesc child, GenericUDF udf)
       throws HiveException {
-    TypeInfo inputTypeInfo = child.getTypeInfo();
-    String inputTypeString = inputTypeInfo.getTypeName();
-    String castTypeString = castType.getTypeName();
+    TypeInfo childTypeInfo = child.getTypeInfo();
+    boolean isChildTypeDecimal = decimalTypePattern.matcher(childTypeInfo.getTypeName()).matches();
+    boolean isCastTypeDecimal = decimalTypePattern.matcher(castType.getTypeName()).matches();
+    if (castType.equals(childTypeInfo) || (isChildTypeDecimal && isCastTypeDecimal)) {
 
-    if (inputTypeString.equals(castTypeString)) {
-      // Nothing to be done
-      return null;
-    }
-    boolean inputTypeDecimal = false;
-    boolean castTypeDecimal = false;
-    if (decimalTypePattern.matcher(inputTypeString).matches()) {
-      inputTypeDecimal = true;
+      // No casting needed, return the child expression as it is
+      return child;
     }
-    if (decimalTypePattern.matcher(castTypeString).matches()) {
-      castTypeDecimal = true;
-    }
-
-    if (castTypeDecimal && !inputTypeDecimal) {
+    GenericUDF castUdf = null;
+    if (isCastTypeDecimal && !isChildTypeDecimal) {
       // Cast the input to decimal
       // If castType is decimal, try not to lose precision for numeric types.
-      castType = updatePrecision(inputTypeInfo, (DecimalTypeInfo) castType);
-      GenericUDFToDecimal castToDecimalUDF = new GenericUDFToDecimal();
-      castToDecimalUDF.setTypeInfo(castType);
-      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
-      children.add(child);
-      ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, castToDecimalUDF, children);
-      return desc;
-    } else if (!castTypeDecimal && inputTypeDecimal) {
-
-      // Cast decimal input to returnType
-      GenericUDF genericUdf = getGenericUDFForCast(castType);
-      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
-      children.add(child);
-      ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
-      return desc;
+      castType = updatePrecision(childTypeInfo, (DecimalTypeInfo) castType);
+      castUdf = new GenericUDFToDecimal();
+      ((GenericUDFToDecimal)castUdf).setTypeInfo(castType);
+    } else if (isChildTypeDecimal || udf instanceof GenericUDFElt || udf instanceof GenericUDFCoalesce) {
+      castUdf = getGenericUDFForCast(castType);
     } else {
-
-      // Casts to exact types including long to double etc. are needed in some special cases.
-      if (udf instanceof GenericUDFCoalesce) {
-        GenericUDF genericUdf = getGenericUDFForCast(castType);
-        List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
-        children.add(child);
-        ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
-        return desc;
-      }
-    }
-    return null;
-  }
-
-  private int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
-    if (isFloatFamily(typeInfo.getTypeName())) {
-      return HiveDecimal.MAX_PRECISION;
+      return child;
     }
-    return HiveDecimalUtils.getPrecisionForType(typeInfo);
+    List<ExprNodeDesc> castChildren = new ArrayList<ExprNodeDesc>();
+    castChildren.add(child);
+    ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, castUdf, castChildren);
+    return desc;
   }
 
   private GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException {
@@ -896,6 +875,10 @@ private VectorExpression getGenericUdfVectorExpression(GenericUDF udf,
 
       // Coalesce is a special case because it can take variable number of arguments.
       return getCoalesceExpression(childExpr, returnType);
+    } else if (udf instanceof GenericUDFElt) {
+
+      // Elt is a special case because it can take variable number of arguments.
+      return getEltExpression(childExpr, returnType);
     } else if (udf instanceof GenericUDFBridge) {
       VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr,
           mode, returnType);
@@ -948,6 +931,33 @@ private VectorExpression getCoalesceExpression(List<ExprNodeDesc> childExpr, Typ
     }
   }
 
+  private VectorExpression getEltExpression(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+      throws HiveException {
+    int[] inputColumns = new int[childExpr.size()];
+    VectorExpression[] vectorChildren = null;
+    try {
+      vectorChildren = getVectorExpressions(childExpr, Mode.PROJECTION);
+
+      int i = 0;
+      for (VectorExpression ve : vectorChildren) {
+        inputColumns[i++] = ve.getOutputColumn();
+      }
+
+      int outColumn = ocm.allocateOutputColumn(getNormalizedTypeName(returnType.getTypeName()));
+      VectorElt vectorElt = new VectorElt(inputColumns, outColumn);
+      vectorElt.setOutputType(returnType.getTypeName());
+      vectorElt.setChildExpressions(vectorChildren);
+      return vectorElt;
+    } finally {
+      // Free the output columns of the child expressions.
+      if (vectorChildren != null) {
+        for (VectorExpression v : vectorChildren) {
+          ocm.freeOutputColumn(v.getOutputColumn());
+        }
+      }
+    }
+  }
+
   /**
    * Create a filter or boolean-valued expression for column IN ( )
    */
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorElt.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorElt.java
new file mode 100644
index 0000000..329d381
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorElt.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+public class VectorElt extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+  private int [] inputColumns;
+  private int outputColumn;
+
+  public VectorElt(int [] inputColumns, int outputColumn) {
+    this();
+    this.inputColumns = inputColumns;
+    this.outputColumn = outputColumn;
+  }
+
+  public VectorElt() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    BytesColumnVector outputVector = (BytesColumnVector) batch.cols[outputColumn];
+    if (n <= 0) {
+      return;
+    }
+
+    outputVector.init();
+
+    outputVector.noNulls = false;
+    outputVector.isRepeating = false;
+
+    LongColumnVector inputIndexVector = (LongColumnVector) batch.cols[inputColumns[0]];
+    long[] indexVector = inputIndexVector.vector;
+    if (inputIndexVector.isRepeating) {
+      int index = (int)indexVector[0];
+      if (index > 0 && index < inputColumns.length) {
+        BytesColumnVector cv = (BytesColumnVector) batch.cols[inputColumns[index]];
+        if (cv.isRepeating) {
+          outputVector.setElement(0, 0, cv);
+          outputVector.isRepeating = true;
+        } else if (batch.selectedInUse) {
+          for (int j = 0; j != n; j++) {
+            int i = sel[j];
+            outputVector.setVal(i, cv.vector[0], cv.start[0], cv.length[0]);
+          }
+        } else {
+          for (int i = 0; i != n; i++) {
+            outputVector.setVal(i, cv.vector[0], cv.start[0], cv.length[0]);
+          }
+        }
+      } else {
+        outputVector.isNull[0] = true;
+        outputVector.isRepeating = true;
+      }
+    } else if (batch.selectedInUse) {
+      for (int j = 0; j != n; j++) {
+        int i = sel[j];
+        int index = (int)indexVector[i];
+        if (index > 0 && index < inputColumns.length) {
+          BytesColumnVector cv = (BytesColumnVector) batch.cols[inputColumns[index]];
+          int cvi = cv.isRepeating ? 0 : i;
+          outputVector.setVal(i, cv.vector[cvi], cv.start[cvi], cv.length[cvi]);
+        } else {
+          outputVector.isNull[i] = true;
+        }
+      }
+    } else {
+      for (int i = 0; i != n; i++) {
+        int index = (int)indexVector[i];
+        if (index > 0 && index < inputColumns.length) {
+          BytesColumnVector cv = (BytesColumnVector) batch.cols[inputColumns[index]];
+          int cvi = cv.isRepeating ? 0 : i;
+          outputVector.setVal(i, cv.vector[cvi], cv.start[cvi], cv.length[cvi]);
+        } else {
+          outputVector.isNull[i] = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return outputType;
+  }
+
+  public int [] getInputColumns() {
+    return inputColumns;
+  }
+
+  public void setInputColumns(int [] inputColumns) {
+    this.inputColumns = inputColumns;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    // Descriptor is not defined because it takes variable number of arguments with different
+    // data types.
+    throw new UnsupportedOperationException("Undefined descriptor");
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 386fed6..e21e32c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -222,6 +222,7 @@ public Vectorizer() {
     supportedGenericUDFs.add(GenericUDFCase.class);
     supportedGenericUDFs.add(GenericUDFWhen.class);
     supportedGenericUDFs.add(GenericUDFCoalesce.class);
+    supportedGenericUDFs.add(GenericUDFElt.class);
 
     // For type casts
     supportedGenericUDFs.add(UDFToLong.class);
diff --git a/ql/src/test/queries/clientpositive/vector_elt.q b/ql/src/test/queries/clientpositive/vector_elt.q
new file mode 100644
index 0000000..1430a17
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_elt.q
@@ -0,0 +1,34 @@
+SET hive.vectorized.execution.enabled=true;
+
+EXPLAIN SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10;
+
+SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10;
+
+EXPLAIN
+SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1;
+
+SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1;
diff --git a/ql/src/test/results/clientpositive/vector_elt.q.out b/ql/src/test/results/clientpositive/vector_elt.q.out
new file mode 100644
index 0000000..ea0af62
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_elt.q.out
@@ -0,0 +1,154 @@
+PREHOOK: query: EXPLAIN SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 3492 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (ctinyint > 0) (type: boolean)
+              Statistics: Num rows: 1164 Data size: 125745 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: ((ctinyint % 2) + 1) (type: int), cstring1 (type: string), cint (type: int), elt(((ctinyint % 2) + 1), cstring1, cint) (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1164 Data size: 125745 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1080 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 1080 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT (ctinyint % 2) + 1, cstring1, cint, elt((ctinyint % 2) + 1, cstring1, cint)
+FROM alltypesorc
+WHERE ctinyint > 0 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+1	cvLH6Eat2yFsyy7p	528534767	cvLH6Eat2yFsyy7p
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+1	cvLH6Eat2yFsyy7p	528534767	cvLH6Eat2yFsyy7p
+2	cvLH6Eat2yFsyy7p	528534767	528534767
+1	cvLH6Eat2yFsyy7p	528534767	cvLH6Eat2yFsyy7p
+PREHOOK: query: EXPLAIN
+SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 0 Data size: 377237 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              expressions: elt(2, 'abc', 'defg') (type: string), elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg') (type: string), elt('1', 'abc', 'defg') (type: string), elt(2, 'aa', UDFToByte('2')) (type: string), elt(2, 'aa', UDFToShort('12345')) (type: string), elt(2, 'aa', UDFToLong('123456789012')) (type: string), elt(2, 'aa', UDFToFloat(1.25)) (type: string), elt(2, 'aa', 16.0) (type: string), elt(0, 'abc', 'defg') (type: string), elt(3, 'abc', 'defg') (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+              Statistics: Num rows: 0 Data size: 377237 Basic stats: PARTIAL Column stats: COMPLETE
+              Limit
+                Number of rows: 1
+                Statistics: Num rows: 0 Data size: 377237 Basic stats: PARTIAL Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 0 Data size: 377237 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT elt(2, 'abc', 'defg'),
+       elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'),
+       elt('1', 'abc', 'defg'),
+       elt(2, 'aa', CAST('2' AS TINYINT)),
+       elt(2, 'aa', CAST('12345' AS SMALLINT)),
+       elt(2, 'aa', CAST('123456789012' AS BIGINT)),
+       elt(2, 'aa', CAST(1.25 AS FLOAT)),
+       elt(2, 'aa', CAST(16.0 AS DOUBLE)),
+       elt(0, 'abc', 'defg'),
+       elt(3, 'abc', 'defg')
+FROM alltypesorc LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+defg	cc	abc	2	12345	123456789012	1.25	16.0	NULL	NULL
-- 
1.8.5.2 (Apple Git-48)
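
Reviewer note (below the signature, so it is ignored when the patch is applied):
VectorElt must preserve the semantics of Hive's scalar ELT, where elt(n, v1, ..., vk)
returns the n-th value argument and NULL when n falls outside 1..k. In evaluate(),
inputColumns[0] carries the index column and inputColumns[1..k] carry the values, so
the bounds check "index > 0 && index < inputColumns.length" is exactly "1 <= n <= k".
A minimal scalar sketch of that contract follows; the class and method names are
illustrative only and are not part of the patch:

    public final class EltReference {

      // 1 <= n <= values.length selects values[n - 1]; any other n yields NULL,
      // matching the elt(0, ...) and elt(3, 'abc', 'defg') rows in vector_elt.q.out.
      static String elt(int n, String... values) {
        return (n > 0 && n <= values.length) ? values[n - 1] : null;
      }

      public static void main(String[] args) {
        System.out.println(elt(2, "abc", "defg")); // defg
        System.out.println(elt(0, "abc", "defg")); // null
        System.out.println(elt(3, "abc", "defg")); // null
      }
    }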