diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 9d27b8d5c0..abf76a95e7 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -877,6 +877,7 @@ minillaplocal.query.files=\
   vector_decimal_udf.q,\
   vector_decimal64_case_when_nvl.q,\
   vector_decimal64_case_when_nvl_cbo.q,\
+  vector_decimal64_multi_vertex.q,\
   vector_full_outer_join.q,\
   vector_fullouter_mapjoin_1_fast.q,\
   vector_fullouter_mapjoin_1_optimized.q,\
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 043fdd63e7..358aa06a13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -834,6 +834,11 @@ public int allocateScratchColumn(TypeInfo typeInfo) throws HiveException {
     return ocm.allocateOutputColumn(typeInfo);
   }
 
+  public int allocateScratchColumn(TypeInfo typeInfo, DataTypePhysicalVariation dataTypePhysicalVariation)
+      throws HiveException {
+    return ocm.allocateOutputColumn(typeInfo, dataTypePhysicalVariation);
+  }
+
   public int[] currentScratchColumns() {
     return ocm.currentScratchColumns();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 988291664e..4f7de35bda 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -3832,7 +3832,14 @@ private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoi
 
         // Make a new big table scratch column for the small table value.
         TypeInfo typeInfo = smallTableExprNode.getTypeInfo();
-        int scratchColumn = vContext.allocateScratchColumn(typeInfo);
+        DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
+        if (typeInfo instanceof DecimalTypeInfo) {
+          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+          if (decimalTypeInfo.getPrecision() <= 18) {
+            dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
+          }
+        }
+        int scratchColumn = vContext.allocateScratchColumn(typeInfo, dataTypePhysicalVariation);
 
         projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
 
@@ -4117,11 +4124,9 @@ private boolean canSpecializeReduceSink(ReduceSinkDesc desc,
       VectorExpression ve = allValueExpressions[i];
       reduceSinkValueColumnMap[i] = ve.getOutputColumnNum();
       reduceSinkValueTypeInfos[i] = valueDescs.get(i).getTypeInfo();
-      reduceSinkValueColumnVectorTypes[i] =
-          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkValueTypeInfos[i]);
-      if (!IdentityExpression.isColumnOnly(ve)) {
-        reduceSinkValueExpressionsList.add(ve);
-      }
+      reduceSinkValueColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(
+          reduceSinkValueTypeInfos[i], ve.getOutputDataTypePhysicalVariation());
+      reduceSinkValueExpressionsList.add(ve);
     }
     if (reduceSinkValueExpressionsList.size() == 0) {
       reduceSinkValueExpressions = null;
     }
@@ -4676,10 +4681,6 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) {
       ExprNodeDesc expr = colList.get(i);
       VectorExpression ve = vContext.getVectorExpression(expr);
       projectedOutputColumns[i] = ve.getOutputColumnNum();
-      if (ve instanceof IdentityExpression) {
-        // Suppress useless evaluation.
-        continue;
-      }
       vectorSelectExprs[index++] = ve;
     }
     if (index < size) {
diff --git a/ql/src/test/queries/clientpositive/perf/query19.q b/ql/src/test/queries/clientpositive/perf/query19.q
index 5768e4b04e..4b2eaace6e 100644
--- a/ql/src/test/queries/clientpositive/perf/query19.q
+++ b/ql/src/test/queries/clientpositive/perf/query19.q
@@ -1,6 +1,7 @@
 set hive.mapred.mode=nonstrict;
+set hive.explain.user=false;
 -- start query 1 in stream 0 using template query19.tpl and seed 1930872976
-explain
+explain vectorization detail
 select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact,
 	sum(ss_ext_sales_price) ext_price
  from date_dim, store_sales, item,customer,customer_address,store
diff --git a/ql/src/test/queries/clientpositive/vector_decimal64_multi_vertex.q b/ql/src/test/queries/clientpositive/vector_decimal64_multi_vertex.q
new file mode 100644
index 0000000000..75685c20ac
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_decimal64_multi_vertex.q
@@ -0,0 +1,85 @@
+set hive.mapred.mode=nonstrict;
+set hive.explain.user=false;
+set hive.auto.convert.join=true;
+set hive.auto.convert.join.noconditionaltask=true;
+set hive.auto.convert.join.noconditionaltask.size=10000;
+-- start query 1 in stream 0 using template query19.tpl and seed 1930872976
+create table store
+(
+    s_store_sk int,
+    s_store_id string,
+    s_rec_start_date string,
+    s_rec_end_date string,
+    s_closed_date_sk int,
+    s_store_name string,
+    s_number_employees int,
+    s_floor_space int,
+    s_hours string,
+    s_manager string,
+    s_market_id int,
+    s_geography_class string,
+    s_market_desc string,
+    s_market_manager string,
+    s_division_id int,
+    s_division_name string,
+    s_company_id int,
+    s_company_name string,
+    s_street_number string,
+    s_street_name string,
+    s_street_type string,
+    s_suite_number string,
+    s_city string,
+    s_county string,
+    s_state string,
+    s_zip string,
+    s_country string,
+    s_gmt_offset decimal(5,2),
+    s_tax_precentage decimal(5,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC;
+create table store_sales
+(
+    ss_sold_date_sk int,
+    ss_sold_time_sk int,
+    ss_item_sk int,
+    ss_customer_sk int,
+    ss_cdemo_sk int,
+    ss_hdemo_sk int,
+    ss_addr_sk int,
+    ss_store_sk int,
+    ss_promo_sk int,
+    ss_ticket_number int,
+    ss_quantity int,
+    ss_wholesale_cost decimal(7,2),
+    ss_list_price decimal(7,2),
+    ss_sales_price decimal(7,2),
+    ss_ext_discount_amt decimal(7,2),
+    ss_ext_sales_price decimal(7,2),
+    ss_ext_wholesale_cost decimal(7,2),
+    ss_ext_list_price decimal(7,2),
+    ss_ext_tax decimal(7,2),
+    ss_coupon_amt decimal(7,2),
+    ss_net_paid decimal(7,2),
+    ss_net_paid_inc_tax decimal(7,2),
+    ss_net_profit decimal(7,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC;
+
+explain vectorization detail
+select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk,
+       sum(ss_ext_sales_price) ext_price
+-- from store_sales, item, customer_address, store
+ from store_sales, store
+ where ss_item_sk = s_store_sk
+ -- and ss_customer_sk = ca_address_sk
+-- and substr(ca_zip,1,5) <> substr(s_zip,1,5)
+-- and ss_store_sk = s_store_sk
+-- and ss_sold_date_sk <> i_brand_id
+ group by s_store_id,
+          s_rec_start_date,
+          s_rec_end_date,
+          s_closed_date_sk;
+
+-- end query 1 in stream 0 using template query19.tpl
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out
new file mode 100644
index 0000000000..5a928cb772
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal64_multi_vertex.q.out
@@ -0,0 +1,384 @@
+PREHOOK: query: create table store
+(
+    s_store_sk int,
+    s_store_id string,
+    s_rec_start_date string,
+    s_rec_end_date string,
+    s_closed_date_sk int,
+    s_store_name string,
+    s_number_employees int,
+    s_floor_space int,
+    s_hours string,
+    s_manager string,
+    s_market_id int,
+    s_geography_class string,
+    s_market_desc string,
+    s_market_manager string,
+    s_division_id int,
+    s_division_name string,
+    s_company_id int,
+    s_company_name string,
+    s_street_number string,
+    s_street_name string,
+    s_street_type string,
+    s_suite_number string,
+    s_city string,
+    s_county string,
+    s_state string,
+    s_zip string,
+    s_country string,
+    s_gmt_offset decimal(5,2),
+    s_tax_precentage decimal(5,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@store
+POSTHOOK: query: create table store
+(
+    s_store_sk int,
+    s_store_id string,
+    s_rec_start_date string,
+    s_rec_end_date string,
+    s_closed_date_sk int,
+    s_store_name string,
+    s_number_employees int,
+    s_floor_space int,
+    s_hours string,
+    s_manager string,
+    s_market_id int,
+    s_geography_class string,
+    s_market_desc string,
+    s_market_manager string,
+    s_division_id int,
+    s_division_name string,
+    s_company_id int,
+    s_company_name string,
+    s_street_number string,
+    s_street_name string,
+    s_street_type string,
+    s_suite_number string,
+    s_city string,
+    s_county string,
+    s_state string,
+    s_zip string,
+    s_country string,
+    s_gmt_offset decimal(5,2),
+    s_tax_precentage decimal(5,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@store
+PREHOOK: query: create table store_sales
+(
+    ss_sold_date_sk int,
+    ss_sold_time_sk int,
+    ss_item_sk int,
+    ss_customer_sk int,
+    ss_cdemo_sk int,
+    ss_hdemo_sk int,
+    ss_addr_sk int,
+    ss_store_sk int,
+    ss_promo_sk int,
+    ss_ticket_number int,
+    ss_quantity int,
+    ss_wholesale_cost decimal(7,2),
+    ss_list_price decimal(7,2),
+    ss_sales_price decimal(7,2),
+    ss_ext_discount_amt decimal(7,2),
+    ss_ext_sales_price decimal(7,2),
+    ss_ext_wholesale_cost decimal(7,2),
+    ss_ext_list_price decimal(7,2),
+    ss_ext_tax decimal(7,2),
+    ss_coupon_amt decimal(7,2),
+    ss_net_paid decimal(7,2),
+    ss_net_paid_inc_tax decimal(7,2),
+    ss_net_profit decimal(7,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@store_sales
+POSTHOOK: query: create table store_sales
+(
+    ss_sold_date_sk int,
+    ss_sold_time_sk int,
+    ss_item_sk int,
+    ss_customer_sk int,
+    ss_cdemo_sk int,
+    ss_hdemo_sk int,
+    ss_addr_sk int,
+    ss_store_sk int,
+    ss_promo_sk int,
+    ss_ticket_number int,
+    ss_quantity int,
+    ss_wholesale_cost decimal(7,2),
+    ss_list_price decimal(7,2),
+    ss_sales_price decimal(7,2),
+    ss_ext_discount_amt decimal(7,2),
+    ss_ext_sales_price decimal(7,2),
+    ss_ext_wholesale_cost decimal(7,2),
+    ss_ext_list_price decimal(7,2),
+    ss_ext_tax decimal(7,2),
+    ss_coupon_amt decimal(7,2),
+    ss_net_paid decimal(7,2),
+    ss_net_paid_inc_tax decimal(7,2),
+    ss_net_profit decimal(7,2)
+)
+row format delimited fields terminated by '\t'
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@store_sales
+PREHOOK: query: explain vectorization detail
+select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk,
+       sum(ss_ext_sales_price) ext_price
+
+ from store_sales, store
+ where ss_item_sk = s_store_sk
+ -- and ss_customer_sk = ca_address_sk
+
+
+
+ group by s_store_id,
+          s_rec_start_date,
+          s_rec_end_date,
+          s_closed_date_sk
+PREHOOK: type: QUERY
+PREHOOK: Input: default@store
+PREHOOK: Input: default@store_sales
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select s_store_id brand_id, s_rec_start_date brand, s_rec_end_date, s_closed_date_sk,
+       sum(ss_ext_sales_price) ext_price
+
+ from store_sales, store
+ where ss_item_sk = s_store_sk
+ -- and ss_customer_sk = ca_address_sk
+
+
+
+ group by s_store_id,
+          s_rec_start_date,
+          s_rec_end_date,
+          s_closed_date_sk
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@store
+POSTHOOK: Input: default@store_sales
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+        Reducer 3 <- Map 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: store_sales
+                  filterExpr: ss_item_sk is not null (type: boolean)
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:ss_sold_date_sk:int, 1:ss_sold_time_sk:int, 2:ss_item_sk:int, 3:ss_customer_sk:int, 4:ss_cdemo_sk:int, 5:ss_hdemo_sk:int, 6:ss_addr_sk:int, 7:ss_store_sk:int, 8:ss_promo_sk:int, 9:ss_ticket_number:int, 10:ss_quantity:int, 11:ss_wholesale_cost:decimal(7,2)/DECIMAL_64, 12:ss_list_price:decimal(7,2)/DECIMAL_64, 13:ss_sales_price:decimal(7,2)/DECIMAL_64, 14:ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, 15:ss_ext_sales_price:decimal(7,2)/DECIMAL_64, 16:ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, 17:ss_ext_list_price:decimal(7,2)/DECIMAL_64, 18:ss_ext_tax:decimal(7,2)/DECIMAL_64, 19:ss_coupon_amt:decimal(7,2)/DECIMAL_64, 20:ss_net_paid:decimal(7,2)/DECIMAL_64, 21:ss_net_paid_inc_tax:decimal(7,2)/DECIMAL_64, 22:ss_net_profit:decimal(7,2)/DECIMAL_64, 23:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 2:int)
+                    predicate: ss_item_sk is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: ss_item_sk (type: int), ss_ext_sales_price (type: decimal(7,2))
+                      outputColumnNames: _col0, _col1
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [2, 15]
+                          selectExpressions: col 2:int, col 15:decimal(7,2)/DECIMAL_64
+                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        null sort order: z
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkLongOperator
+                            keyColumns: 2:int
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            valueColumns: 15:decimal(7,2)
+                            valueExpressions: col 15:decimal(7,2)/DECIMAL_64
+                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: decimal(7,2))
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 23
+                    includeColumns: [2, 15]
+                    dataColumns: ss_sold_date_sk:int, ss_sold_time_sk:int, ss_item_sk:int, ss_customer_sk:int, ss_cdemo_sk:int, ss_hdemo_sk:int, ss_addr_sk:int, ss_store_sk:int, ss_promo_sk:int, ss_ticket_number:int, ss_quantity:int, ss_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_list_price:decimal(7,2)/DECIMAL_64, ss_sales_price:decimal(7,2)/DECIMAL_64, ss_ext_discount_amt:decimal(7,2)/DECIMAL_64, ss_ext_sales_price:decimal(7,2)/DECIMAL_64, ss_ext_wholesale_cost:decimal(7,2)/DECIMAL_64, ss_ext_list_price:decimal(7,2)/DECIMAL_64, ss_ext_tax:decimal(7,2)/DECIMAL_64, ss_coupon_amt:decimal(7,2)/DECIMAL_64, ss_net_paid:decimal(7,2)/DECIMAL_64, ss_net_paid_inc_tax:decimal(7,2)/DECIMAL_64, ss_net_profit:decimal(7,2)/DECIMAL_64
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: store
+                  filterExpr: s_store_sk is not null (type: boolean)
+                  Statistics: Num rows: 1 Data size: 560 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:s_store_sk:int, 1:s_store_id:string, 2:s_rec_start_date:string, 3:s_rec_end_date:string, 4:s_closed_date_sk:int, 5:s_store_name:string, 6:s_number_employees:int, 7:s_floor_space:int, 8:s_hours:string, 9:s_manager:string, 10:s_market_id:int, 11:s_geography_class:string, 12:s_market_desc:string, 13:s_market_manager:string, 14:s_division_id:int, 15:s_division_name:string, 16:s_company_id:int, 17:s_company_name:string, 18:s_street_number:string, 19:s_street_name:string, 20:s_street_type:string, 21:s_suite_number:string, 22:s_city:string, 23:s_county:string, 24:s_state:string, 25:s_zip:string, 26:s_country:string, 27:s_gmt_offset:decimal(5,2)/DECIMAL_64, 28:s_tax_precentage:decimal(5,2)/DECIMAL_64, 29:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 0:int)
+                    predicate: s_store_sk is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 560 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: s_store_sk (type: int), s_store_id (type: string), s_rec_start_date (type: string), s_rec_end_date (type: string), s_closed_date_sk (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [0, 1, 2, 3, 4]
+                          selectExpressions: col 0:int, col 1:string, col 2:string, col 3:string, col 4:int
+                      Statistics: Num rows: 1 Data size: 560 Basic stats: COMPLETE Column stats: NONE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        Map Join Vectorization:
+                            bigTableKeyColumns: 0:int
+                            bigTableRetainColumnNums: [1, 2, 3, 4]
+                            bigTableValueColumns: 1:string, 2:string, 3:string, 4:int
+                            className: VectorMapJoinInnerLongOperator
+                            native: true
+                            nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                            nonOuterSmallTableKeyMapping: []
+                            projectedOutput: 30:decimal(7,2), 1:string, 2:string, 3:string, 4:int
+                            smallTableValueMapping: 30:decimal(7,2)
+                            hashTableImplementationType: OPTIMIZED
+                        outputColumnNames: _col1, _col3, _col4, _col5, _col6
+                        input vertices:
+                          0 Map 1
+                        Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                        Group By Operator
+                          aggregations: sum(_col1)
+                          Group By Vectorization:
+                              aggregators: VectorUDAFSumDecimal64(col 30:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64
+                              className: VectorGroupByOperator
+                              groupByMode: HASH
+                              keyExpressions: col 1:string, col 2:string, col 3:string, col 4:int
+                              native: false
+                              vectorProcessingMode: HASH
+                              projectedOutputColumnNums: [0]
+                          keys: _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: int)
+                          minReductionHashAggr: 0.99
+                          mode: hash
+                          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                          Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                          Reduce Output Operator
+                            key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: int)
+                            null sort order: zzzz
+                            sort order: ++++
+                            Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: int)
+                            Reduce Sink Vectorization:
+                                className: VectorReduceSinkMultiKeyOperator
+                                keyColumns: 0:string, 1:string, 2:string, 3:int
+                                native: true
+                                nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                                valueColumns: 4:decimal(17,2)
+                                valueExpressions: col 4:decimal(17,2)/DECIMAL_64
+                            Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                            value expressions: _col4 (type: decimal(17,2))
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 29
+                    includeColumns: [0, 1, 2, 3, 4]
+                    dataColumns: s_store_sk:int, s_store_id:string, s_rec_start_date:string, s_rec_end_date:string, s_closed_date_sk:int, s_store_name:string, s_number_employees:int, s_floor_space:int, s_hours:string, s_manager:string, s_market_id:int, s_geography_class:string, s_market_desc:string, s_market_manager:string, s_division_id:int, s_division_name:string, s_company_id:int, s_company_name:string, s_street_number:string, s_street_name:string, s_street_type:string, s_suite_number:string, s_city:string, s_county:string, s_state:string, s_zip:string, s_country:string, s_gmt_offset:decimal(5,2)/DECIMAL_64, s_tax_precentage:decimal(5,2)/DECIMAL_64
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [decimal(7,2)/DECIMAL_64]
+        Reducer 3 
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: zzzz
+                reduceColumnSortOrder: ++++
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 5
+                    dataColumns: KEY._col0:string, KEY._col1:string, KEY._col2:string, KEY._col3:int, VALUE._col0:decimal(17,2)
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumDecimal(col 4:decimal(17,2)) -> decimal(17,2)
+                    className: VectorGroupByOperator
+                    groupByMode: MERGEPARTIAL
+                    keyExpressions: col 0:string, col 1:string, col 2:string, col 3:int
+                    native: false
+                    vectorProcessingMode: MERGE_PARTIAL
+                    projectedOutputColumnNums: [0]
+                keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
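
Reviewer notes (appended commentary; not part of the patch).

Why the patch gates DECIMAL_64 on precision <= 18: a decimal with at most 18
total digits always fits, as an unscaled integer, in a signed 64-bit long
(10^18 - 1 is less than Long.MAX_VALUE, while 19 digits can overflow), so such
columns can ride in a long-backed vector instead of object-based decimal
writables. A minimal standalone sketch of that bound, using only the JDK; the
constant name MAX_DECIMAL64_PRECISION below is illustrative, not a Hive API:

    import java.math.BigInteger;

    public class Decimal64Bound {
        // 18 is the largest precision whose whole value range fits in a long:
        // 10^18 - 1 = 999999999999999999 < Long.MAX_VALUE = 9223372036854775807.
        static final int MAX_DECIMAL64_PRECISION = 18; // illustrative name

        static boolean fitsInDecimal64(int precision) {
            return precision <= MAX_DECIMAL64_PRECISION;
        }

        public static void main(String[] args) {
            BigInteger maxUnscaled18 = BigInteger.TEN.pow(18).subtract(BigInteger.ONE);
            System.out.println(maxUnscaled18.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) < 0); // true
            System.out.println(fitsInDecimal64(7));  // true  -> decimal(7,2) qualifies for DECIMAL_64
            System.out.println(fitsInDecimal64(19)); // false -> stays a regular decimal column
        }
    }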
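What the new allocateScratchColumn overload addresses, as far as the diff
shows: the big-table scratch column that receives the small-table value in a
vectorized map join was previously allocated with no physical variation, so a
decimal arriving from another vertex in DECIMAL_64 form landed in a column
vector of the wrong physical class. A sketch of the two representations,
assuming hive-storage-api on the classpath (DECIMAL_64 data lives in a
long-backed vector; regular decimals in an object-backed one):

    import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

    public class ScratchColumnVariation {
        public static void main(String[] args) {
            // Allocated without a variation: object-based, one writable per row.
            ColumnVector plainDecimal = new DecimalColumnVector(1024, 7, 2);
            // Allocated as DECIMAL_64: long-backed, scaled values stored inline.
            ColumnVector decimal64 = new LongColumnVector(1024);
            System.out.println(plainDecimal.getClass().getSimpleName()); // DecimalColumnVector
            System.out.println(decimal64.getClass().getSimpleName());    // LongColumnVector
            // An operator compiled against the DECIMAL_64 representation casts
            // the batch column to the long-backed class; handing it plainDecimal
            // instead fails with a ClassCastException at runtime.
        }
    }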
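The q.out above also hints at why the ReduceSink change stops skipping
identity value expressions: a value column tagged decimal(7,2)/DECIMAL_64 is
still declared to the serializer as a plain decimal, so the scaled-long form
has to be bridged at the vertex boundary rather than passed through untouched.
The encoding being bridged can be seen with HiveDecimalWritable from
hive-storage-api; this round-trip is my sketch, not code from the patch, and
assumes the serialize64/setFromLongAndScale methods available in recent
storage-api releases:

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class Decimal64Encoding {
        public static void main(String[] args) {
            HiveDecimalWritable w = new HiveDecimalWritable("12345.67");
            // DECIMAL_64 keeps the unscaled digits in a long at the column's scale.
            long scaled = w.serialize64(2);
            System.out.println(scaled); // 1234567
            // Restoring the logical decimal(7,2) value from the scaled long:
            HiveDecimalWritable restored = new HiveDecimalWritable(0L);
            restored.setFromLongAndScale(scaled, 2);
            System.out.println(restored); // 12345.67
        }
    }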