diff --git a/data/files/decimal64table2.csv b/data/files/decimal64table2.csv new file mode 100644 index 0000000000..dc551dc799 --- /dev/null +++ b/data/files/decimal64table2.csv @@ -0,0 +1,4 @@ +null,10,101,10 +1000,10,101,10 +1000,10,101,10 +1000,10,100,10 diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index 375e22c2c1..9d27b8d5c0 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -876,6 +876,7 @@ minillaplocal.query.files=\ vector_decimal_2.q,\ vector_decimal_udf.q,\ vector_decimal64_case_when_nvl.q,\ + vector_decimal64_case_when_nvl_cbo.q,\ vector_full_outer_join.q,\ vector_fullouter_mapjoin_1_fast.q,\ vector_fullouter_mapjoin_1_optimized.q,\ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index d7d8b6fee1..d837dbd928 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -43,6 +43,7 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.expressions.ConvertDecimal64ToDecimal; +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar; import org.apache.hadoop.hive.ql.exec.vector.reducesink.*; import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc; @@ -4744,15 +4745,27 @@ private static VectorExpression fixDecimalDataTypePhysicalVariations(final Vecto } else { Object[] arguments; int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ? 0 : 1); - if (parent instanceof DecimalColDivideDecimalScalar) { - arguments = new Object[argumentCount + 1]; - arguments[children.length] = ((DecimalColDivideDecimalScalar) parent).getValue(); + // VectorCoalesce receives its input column numbers as a single int[] argument. + // Handle it as a special case here to avoid instantiation failure.
+ if (parent instanceof VectorCoalesce) { + arguments = new Object[2]; + arguments[0] = new int[children.length]; + for (int i = 0; i < children.length; i++) { + VectorExpression vce = children[i]; + ((int[]) arguments[0])[i] = vce.getOutputColumnNum(); + } + arguments[1] = parent.getOutputColumnNum(); } else { - arguments = new Object[argumentCount]; - } - for (int i = 0; i < children.length; i++) { - VectorExpression vce = children[i]; - arguments[i] = vce.getOutputColumnNum(); + if (parent instanceof DecimalColDivideDecimalScalar) { + arguments = new Object[argumentCount + 1]; + arguments[children.length] = ((DecimalColDivideDecimalScalar) parent).getValue(); + } else { + arguments = new Object[argumentCount]; + } + for (int i = 0; i < children.length; i++) { + VectorExpression vce = children[i]; + arguments[i] = vce.getOutputColumnNum(); + } } // retain output column number from parent if (parent.getOutputColumnNum() != -1) { diff --git a/ql/src/test/queries/clientpositive/vector_decimal64_case_when_nvl_cbo.q b/ql/src/test/queries/clientpositive/vector_decimal64_case_when_nvl_cbo.q new file mode 100644 index 0000000000..00331b847e --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_decimal64_case_when_nvl_cbo.q @@ -0,0 +1,12 @@ +set hive.cbo.enable=true; +set hive.explain.user=false; +create external table vector_decimal64_case_when(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price double) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE; +LOAD DATA LOCAL INPATH '../../data/files/decimal64table2.csv' OVERWRITE INTO TABLE vector_decimal64_case_when; +create table vector_decimal64_case_when_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price decimal(7,2)) stored as ORC; +insert into table vector_decimal64_case_when_tmp select * from vector_decimal64_case_when; +explain vectorization detail select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp; +select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp; +explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp; +select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp; +explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp; +select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp; diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out new file mode 100644 index 0000000000..d3e6eec3fa --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_case_when_nvl_cbo.q.out @@ -0,0 +1,441 @@ +PREHOOK: query: create external table vector_decimal64_case_when(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price double) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_case_when +POSTHOOK: query: create external table vector_decimal64_case_when(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price double) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: 
type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_case_when +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table2.csv' OVERWRITE INTO TABLE vector_decimal64_case_when +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@vector_decimal64_case_when +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal64table2.csv' OVERWRITE INTO TABLE vector_decimal64_case_when +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@vector_decimal64_case_when +PREHOOK: query: create table vector_decimal64_case_when_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price decimal(7,2)) stored as ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_decimal64_case_when_tmp +POSTHOOK: query: create table vector_decimal64_case_when_tmp(ss_ext_list_price decimal(7,2), ss_ext_wholesale_cost decimal(19,1), ss_ext_discount_amt int, ss_ext_sales_price decimal(7,2)) stored as ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_decimal64_case_when_tmp +PREHOOK: query: insert into table vector_decimal64_case_when_tmp select * from vector_decimal64_case_when +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when +PREHOOK: Output: default@vector_decimal64_case_when_tmp +POSTHOOK: query: insert into table vector_decimal64_case_when_tmp select * from vector_decimal64_case_when +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when +POSTHOOK: Output: default@vector_decimal64_case_when_tmp +POSTHOOK: Lineage: vector_decimal64_case_when_tmp.ss_ext_discount_amt SIMPLE [(vector_decimal64_case_when)vector_decimal64_case_when.FieldSchema(name:ss_ext_discount_amt, type:int, comment:null), ] +POSTHOOK: Lineage: vector_decimal64_case_when_tmp.ss_ext_list_price SIMPLE [(vector_decimal64_case_when)vector_decimal64_case_when.FieldSchema(name:ss_ext_list_price, type:decimal(7,2), comment:null), ] +POSTHOOK: Lineage: vector_decimal64_case_when_tmp.ss_ext_sales_price EXPRESSION [(vector_decimal64_case_when)vector_decimal64_case_when.FieldSchema(name:ss_ext_sales_price, type:double, comment:null), ] +POSTHOOK: Lineage: vector_decimal64_case_when_tmp.ss_ext_wholesale_cost SIMPLE [(vector_decimal64_case_when)vector_decimal64_case_when.FieldSchema(name:ss_ext_wholesale_cost, type:decimal(19,1), comment:null), ] +PREHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vector_decimal64_case_when_tmp + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TableScan 
Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(19,1), 2:ss_ext_discount_amt:int, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct] + Select Operator + expressions: COALESCE(ss_ext_list_price,1) (type: decimal(12,2)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [7] + selectExpressions: VectorCoalesce(columns [5, 6])(children: ConvertDecimal64ToDecimal(col 0:decimal(12,2)/DECIMAL_64) -> 5:decimal(12,2), ConstantVectorExpression(val 1) -> 6:decimal(10,0)) -> 7:decimal(12,2) + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 7:decimal(12,2)) -> decimal(22,2) + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + minReductionHashAggr: 0.75 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + null sort order: + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:decimal(22,2) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: decimal(22,2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0] + dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(19,1), ss_ext_discount_amt:int, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(12,2), decimal(10,0), decimal(12,2)] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:decimal(22,2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 0:decimal(22,2)) -> decimal(22,2) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic 
stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: select sum(NVL(ss_ext_list_price, 1)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +3001.00 +PREHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vector_decimal64_case_when_tmp + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(19,1), 2:ss_ext_discount_amt:int, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct] + Select Operator + expressions: COALESCE(ss_ext_list_price,1.1) (type: decimal(7,2)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6] + selectExpressions: VectorCoalesce(columns [0, 5])(children: col 0:decimal(7,2)/DECIMAL_64, ConstantVectorExpression(val 1.1) -> 5:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)/DECIMAL_64 + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal64(col 6:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + minReductionHashAggr: 0.75 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + null sort order: + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:decimal(17,2) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: decimal(17,2)) + Execution mode: 
vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0] + dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(19,1), ss_ext_discount_amt:int, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(7,2)/DECIMAL_64, decimal(7,2)/DECIMAL_64] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:decimal(17,2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: select sum(NVL(ss_ext_list_price, 1.1)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +3001.10 +PREHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: vector_decimal64_case_when_tmp + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + 
TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 1:ss_ext_wholesale_cost:decimal(19,1), 2:ss_ext_discount_amt:int, 3:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 4:ROW__ID:struct] + Select Operator + expressions: COALESCE(ss_ext_list_price,1.1) (type: decimal(7,2)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [6] + selectExpressions: VectorCoalesce(columns [0, 5])(children: col 0:decimal(7,2)/DECIMAL_64, ConstantVectorExpression(val 1.1) -> 5:decimal(7,2)/DECIMAL_64) -> 6:decimal(7,2)/DECIMAL_64 + Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal64(col 6:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64 + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + projectedOutputColumnNums: [0] + minReductionHashAggr: 0.75 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + null sort order: + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:decimal(17,2) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: decimal(17,2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0] + dataColumns: ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(19,1), ss_ext_discount_amt:int, ss_ext_sales_price:decimal(7,2)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(7,2)/DECIMAL_64, decimal(7,2)/DECIMAL_64] + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:decimal(17,2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2) + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 
112 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +POSTHOOK: query: select sum(NVL(ss_ext_list_price, 1.1BD)) from vector_decimal64_case_when_tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_decimal64_case_when_tmp +#### A masked pattern was here #### +3001.10