From 58ac44b47a8ef3b7c52c7ef9a090224d2cd12a4a Mon Sep 17 00:00:00 2001 From: Ashutosh Chauhan Date: Wed, 27 Jul 2016 19:21:11 -0700 Subject: [PATCH] HIVE-14367 : Estimated size for constant nulls is 0 --- .../results/clientpositive/udaf_example_avg.q.out | 8 +- .../results/clientpositive/udaf_example_max.q.out | 8 +- .../clientpositive/udaf_example_max_n.q.out | 8 +- .../results/clientpositive/udaf_example_min.q.out | 8 +- .../clientpositive/udaf_example_min_n.q.out | 8 +- .../stats/annotation/StatsRulesProcFactory.java | 2 +- .../apache/hadoop/hive/ql/stats/StatsUtils.java | 49 +-- .../hive/ql/udf/generic/GenericUDAFAverage.java | 10 +- .../hadoop/hive/ql/udf/generic/GenericUDAFMax.java | 11 +- .../hadoop/hive/ql/udf/generic/GenericUDAFMin.java | 10 + .../test/queries/clientpositive/vector_coalesce.q | 1 + .../results/clientnegative/udf_assert_true.q.out | 24 +- .../results/clientpositive/acid_table_stats.q.out | 22 +- .../annotate_stats_deep_filters.q.out | 6 +- .../clientpositive/annotate_stats_filter.q.out | 112 +++---- .../clientpositive/annotate_stats_groupby.q.out | 12 +- .../clientpositive/annotate_stats_join.q.out | 16 +- .../clientpositive/annotate_stats_join_pkfk.q.out | 74 ++--- .../clientpositive/annotate_stats_limit.q.out | 20 +- .../clientpositive/annotate_stats_select.q.out | 4 +- .../clientpositive/annotate_stats_union.q.out | 20 +- .../results/clientpositive/autoColumnStats_4.q.out | 8 +- .../results/clientpositive/autoColumnStats_7.q.out | 8 +- .../results/clientpositive/autoColumnStats_9.q.out | 10 +- .../cbo_rp_annotate_stats_groupby.q.out | 12 +- .../results/clientpositive/cbo_rp_auto_join0.q.out | 24 +- .../cbo_rp_groupby3_noskew_multi_distinct.q.out | 6 +- .../test/results/clientpositive/cbo_rp_join0.q.out | 50 +-- .../cbo_rp_udaf_percentile_approx_23.q.out | 16 +- .../results/clientpositive/constantfolding.q.out | 4 +- .../clientpositive/create_genericudaf.q.out | 8 +- .../results/clientpositive/decimal_precision.q.out | 8 +- .../results/clientpositive/decimal_stats.q.out | 12 +- .../test/results/clientpositive/decimal_udf.q.out | 8 +- .../results/clientpositive/fetch_aggregation.q.out | 4 +- ql/src/test/results/clientpositive/fold_case.q.out | 6 +- ql/src/test/results/clientpositive/groupby3.q.out | 10 +- .../test/results/clientpositive/groupby3_map.q.out | 6 +- .../groupby3_map_multi_distinct.q.out | 6 +- .../results/clientpositive/groupby3_map_skew.q.out | 10 +- .../results/clientpositive/groupby3_noskew.q.out | 6 +- .../groupby3_noskew_multi_distinct.q.out | 6 +- .../clientpositive/interval_arithmetic.q.out | 8 +- .../results/clientpositive/literal_decimal.q.out | 4 +- .../llap/dynamic_partition_pruning.q.out | 156 +++++----- .../vectorized_dynamic_partition_pruning.q.out | 156 +++++----- .../results/clientpositive/metadataonly1.q.out | 34 +- .../results/clientpositive/num_op_type_conv.q.out | 4 +- .../test/results/clientpositive/perf/query13.q.out | 4 +- .../test/results/clientpositive/perf/query28.q.out | 18 +- .../reduceSinkDeDuplication_pRS_key_empty.q.out | 14 +- .../clientpositive/remove_exprs_stats.q.out | 58 ++-- .../clientpositive/spark/annotate_stats_join.q.out | 16 +- .../results/clientpositive/spark/groupby3.q.out | 10 +- .../clientpositive/spark/groupby3_map.q.out | 6 +- .../spark/groupby3_map_multi_distinct.q.out | 6 +- .../clientpositive/spark/groupby3_map_skew.q.out | 10 +- .../clientpositive/spark/groupby3_noskew.q.out | 6 +- .../spark/groupby3_noskew_multi_distinct.q.out | 6 +- .../results/clientpositive/spark/subquery_in.q.out | 12 +- 
.../clientpositive/spark/union_remove_6_subq.q.out | 8 +- .../clientpositive/spark/vectorization_0.q.out | 46 +-- .../spark/vectorization_pushdown.q.out | 8 +- .../spark/vectorization_short_regress.q.out | 40 +-- .../clientpositive/spark/vectorized_mapjoin.q.out | 8 +- .../spark/vectorized_shufflejoin.q.out | 12 +- .../spark/vectorized_timestamp_funcs.q.out | 10 +- .../results/clientpositive/stats_list_bucket.q.out | 2 +- .../test/results/clientpositive/subquery_in.q.out | 12 +- .../clientpositive/subquery_multiinsert.q.out | 144 ++++----- .../results/clientpositive/subquery_notin.q.out | 38 +-- .../tez/dynamic_partition_pruning.q.out | 156 +++++----- .../results/clientpositive/tez/explainuser_1.q.out | 346 ++++++++++----------- .../results/clientpositive/tez/explainuser_3.q.out | 4 +- .../test/results/clientpositive/tez/groupby3.q.out | 10 +- .../results/clientpositive/tez/metadataonly1.q.out | 40 +-- .../results/clientpositive/tez/subquery_in.q.out | 12 +- .../clientpositive/tez/vector_aggregate_9.q.out | 8 +- .../tez/vector_aggregate_without_gby.q.out | 4 +- .../clientpositive/tez/vector_coalesce.q.out | 70 ++--- .../tez/vector_decimal_precision.q.out | 8 +- .../clientpositive/tez/vector_decimal_udf.q.out | 8 +- .../tez/vector_interval_arithmetic.q.out | 16 +- .../tez/vector_null_projection.q.out | 26 +- .../clientpositive/tez/vectorization_0.q.out | 46 +-- .../tez/vectorization_pushdown.q.out | 8 +- .../tez/vectorization_short_regress.q.out | 40 +-- .../tez/vectorized_distinct_gby.q.out | 8 +- .../tez/vectorized_dynamic_partition_pruning.q.out | 156 +++++----- .../clientpositive/tez/vectorized_mapjoin.q.out | 8 +- .../tez/vectorized_shufflejoin.q.out | 12 +- .../tez/vectorized_timestamp_funcs.q.out | 10 +- .../clientpositive/udaf_number_format.q.out | 4 +- .../clientpositive/udaf_percentile_approx_23.q.out | 16 +- ql/src/test/results/clientpositive/udf3.q.out | 4 +- ql/src/test/results/clientpositive/udf4.q.out | 4 +- ql/src/test/results/clientpositive/udf7.q.out | 2 +- ql/src/test/results/clientpositive/udf8.q.out | 8 +- ql/src/test/results/clientpositive/udf_case.q.out | 2 +- .../test/results/clientpositive/udf_coalesce.q.out | 2 +- ql/src/test/results/clientpositive/udf_elt.q.out | 2 +- .../test/results/clientpositive/udf_greatest.q.out | 2 +- ql/src/test/results/clientpositive/udf_if.q.out | 2 +- ql/src/test/results/clientpositive/udf_instr.q.out | 2 +- ql/src/test/results/clientpositive/udf_least.q.out | 2 +- .../test/results/clientpositive/udf_locate.q.out | 2 +- ql/src/test/results/clientpositive/udf_trunc.q.out | 4 +- ql/src/test/results/clientpositive/udf_when.q.out | 2 +- .../test/results/clientpositive/udtf_stack.q.out | 6 +- .../clientpositive/union_remove_6_subq.q.out | 8 +- .../clientpositive/vector_aggregate_9.q.out | 8 +- .../vector_aggregate_without_gby.q.out | 8 +- .../results/clientpositive/vector_coalesce.q.out | 80 ++--- .../clientpositive/vector_decimal_precision.q.out | 8 +- .../clientpositive/vector_decimal_udf.q.out | 8 +- .../test/results/clientpositive/vector_elt.q.out | 4 +- .../vector_interval_arithmetic.q.out | 16 +- .../clientpositive/vector_null_projection.q.out | 30 +- .../test/results/clientpositive/vector_nvl.q.out | 4 +- .../test/results/clientpositive/vector_udf1.q.out | 24 +- .../results/clientpositive/vectorization_0.q.out | 46 +-- .../clientpositive/vectorization_pushdown.q.out | 8 +- .../vectorization_short_regress.q.out | 40 +-- .../clientpositive/vectorized_distinct_gby.q.out | 4 +- .../clientpositive/vectorized_mapjoin.q.out | 8 +- 
.../clientpositive/vectorized_shufflejoin.q.out | 12 +- .../vectorized_timestamp_funcs.q.out | 10 +- 127 files changed, 1458 insertions(+), 1447 deletions(-) diff --git a/contrib/src/test/results/clientpositive/udaf_example_avg.q.out b/contrib/src/test/results/clientpositive/udaf_example_avg.q.out index 61926d4..4e6cb99 100644 --- a/contrib/src/test/results/clientpositive/udaf_example_avg.q.out +++ b/contrib/src/test/results/clientpositive/udaf_example_avg.q.out @@ -33,20 +33,20 @@ STAGE PLANS: aggregations: example_avg(_col0), example_avg(_col1) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct) Reduce Operator Tree: Group By Operator aggregations: example_avg(VALUE._col0), example_avg(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/contrib/src/test/results/clientpositive/udaf_example_max.q.out b/contrib/src/test/results/clientpositive/udaf_example_max.q.out index 932d8df..fc5a896 100644 --- a/contrib/src/test/results/clientpositive/udaf_example_max.q.out +++ b/contrib/src/test/results/clientpositive/udaf_example_max.q.out @@ -38,20 +38,20 @@ STAGE PLANS: aggregations: example_max(_col0), example_max(_col1) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reduce Operator Tree: Group By Operator aggregations: example_max(VALUE._col0), example_max(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out b/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out index 16ae212..47d8f52 100644 --- a/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out +++ b/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out @@ -33,20 +33,20 @@ STAGE 
PLANS: aggregations: example_max_n(_col0, 10), example_max_n(_col2, 10) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct,n:int>), _col1 (type: struct,n:int>) Reduce Operator Tree: Group By Operator aggregations: example_max_n(VALUE._col0), example_max_n(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/contrib/src/test/results/clientpositive/udaf_example_min.q.out b/contrib/src/test/results/clientpositive/udaf_example_min.q.out index b0ffe53..feb2add 100644 --- a/contrib/src/test/results/clientpositive/udaf_example_min.q.out +++ b/contrib/src/test/results/clientpositive/udaf_example_min.q.out @@ -38,20 +38,20 @@ STAGE PLANS: aggregations: example_min(_col0), example_min(_col1) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reduce Operator Tree: Group By Operator aggregations: example_min(VALUE._col0), example_min(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out b/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out index 7e7dd84..16c4684 100644 --- a/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out +++ b/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out @@ -33,20 +33,20 @@ STAGE PLANS: aggregations: example_min_n(_col0, 10), example_min_n(_col2, 10) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: 
NONE value expressions: _col0 (type: struct,n:int>), _col1 (type: struct,n:int>) Reduce Operator Tree: Group By Operator aggregations: example_min_n(VALUE._col0), example_min_n(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 42cbc14..ab07fb6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -1171,7 +1171,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, ColStatistics cs = new ColStatistics(colName, colType); cs.setCountDistint(stats.getNumRows()); cs.setNumNulls(0); - cs.setAvgColLen(StatsUtils.getAvgColLenOfFixedLengthTypes(colType)); + cs.setAvgColLen(StatsUtils.getAvgColLenOf(conf, ci.getObjectInspector(), colType)); aggColStats.add(cs); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java index 7a15904..2a9dc11 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java @@ -424,7 +424,7 @@ private static void addParitionColumnStats(HiveConf conf, List neededCol long numPartitions = getNDVPartitionColumn(partList.getPartitions(), ci.getInternalName()); partCS.setCountDistint(numPartitions); - partCS.setAvgColLen(StatsUtils.getAvgColLenOfVariableLengthTypes(conf, + partCS.setAvgColLen(StatsUtils.getAvgColLenOf(conf, ci.getObjectInspector(), partCS.getColumnType())); partCS.setRange(getRangePartitionColumn(partList.getPartitions(), ci.getInternalName(), ci.getType().getTypeName(), conf.getVar(ConfVars.DEFAULTPARTITIONNAME))); @@ -543,7 +543,7 @@ public static int estimateRowSizeFromSchema(HiveConf conf, List sche || colTypeLowerCase.startsWith(serdeConstants.MAP_TYPE_NAME) || colTypeLowerCase.startsWith(serdeConstants.STRUCT_TYPE_NAME) || colTypeLowerCase.startsWith(serdeConstants.UNION_TYPE_NAME)) { - avgRowSize += getAvgColLenOfVariableLengthTypes(conf, oi, colTypeLowerCase); + avgRowSize += getAvgColLenOf(conf, oi, colTypeLowerCase); } else { avgRowSize += getAvgColLenOfFixedLengthTypes(colTypeLowerCase); } @@ -805,7 +805,7 @@ public static ColStatistics getColStatistics(ColumnStatisticsObj cso, String tab * - column type * @return raw data size */ - public static long getAvgColLenOfVariableLengthTypes(HiveConf conf, ObjectInspector oi, + public static long getAvgColLenOf(HiveConf conf, ObjectInspector oi, String colType) { long configVarLen = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_MAX_VARIABLE_LENGTH); @@ -872,7 +872,7 @@ public static long getAvgColLenOfVariableLengthTypes(HiveConf conf, ObjectInspec return getSizeOfComplexTypes(conf, oi); } - return 0; + throw new IllegalArgumentException("Size requested for unknown 
type: " + colType + " OI: " + oi.getTypeName()); } /** @@ -895,10 +895,10 @@ public static long getSizeOfComplexTypes(HiveConf conf, ObjectInspector oi) { if (colTypeLowerCase.equals(serdeConstants.STRING_TYPE_NAME) || colTypeLowerCase.startsWith(serdeConstants.VARCHAR_TYPE_NAME) || colTypeLowerCase.startsWith(serdeConstants.CHAR_TYPE_NAME)) { - int avgColLen = (int) getAvgColLenOfVariableLengthTypes(conf, oi, colTypeLowerCase); + int avgColLen = (int) getAvgColLenOf(conf, oi, colTypeLowerCase); result += JavaDataModel.get().lengthForStringOfLength(avgColLen); } else if (colTypeLowerCase.equals(serdeConstants.BINARY_TYPE_NAME)) { - int avgColLen = (int) getAvgColLenOfVariableLengthTypes(conf, oi, colTypeLowerCase); + int avgColLen = (int) getAvgColLenOf(conf, oi, colTypeLowerCase); result += JavaDataModel.get().lengthForByteArrayOfSize(avgColLen); } else { result += getAvgColLenOfFixedLengthTypes(colTypeLowerCase); @@ -989,11 +989,13 @@ public static long getAvgColLenOfFixedLengthTypes(String colType) { if (colTypeLowerCase.equals(serdeConstants.TINYINT_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.SMALLINT_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.INT_TYPE_NAME) + || colTypeLowerCase.equals(serdeConstants.VOID_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.BOOLEAN_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.FLOAT_TYPE_NAME)) { return JavaDataModel.get().primitive1(); } else if (colTypeLowerCase.equals(serdeConstants.DOUBLE_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.BIGINT_TYPE_NAME) + || colTypeLowerCase.equals(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME) || colTypeLowerCase.equals("long")) { return JavaDataModel.get().primitive2(); } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) { @@ -1002,8 +1004,10 @@ public static long getAvgColLenOfFixedLengthTypes(String colType) { return JavaDataModel.get().lengthOfDate(); } else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) { return JavaDataModel.get().lengthOfDecimal(); + } else if (colTypeLowerCase.equals(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME)) { + return JavaDataModel.JAVA32_META; } else { - return 0; + throw new IllegalArgumentException("Size requested for unknown type: " + colType); } } @@ -1225,7 +1229,7 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis double avgColSize = 0; long countDistincts = 0; long numNulls = 0; - ObjectInspector oi = null; + ObjectInspector oi = end.getWritableObjectInspector(); long numRows = parentStats.getNumRows(); if (end instanceof ExprNodeColumnDesc) { @@ -1244,7 +1248,6 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis // virtual columns colType = encd.getTypeInfo().getTypeName(); countDistincts = numRows; - oi = encd.getWritableObjectInspector(); } else { // clone the column stats and return @@ -1263,16 +1266,13 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis // constant projection ExprNodeConstantDesc encd = (ExprNodeConstantDesc) end; - // null projection + colName = encd.getName(); + colType = encd.getTypeString(); if (encd.getValue() == null) { - colName = encd.getName(); - colType = serdeConstants.VOID_TYPE_NAME; + // null projection numNulls = numRows; } else { - colName = encd.getName(); - colType = encd.getTypeString(); countDistincts = 1; - oi = encd.getWritableObjectInspector(); } } else if (end instanceof ExprNodeGenericFuncDesc) { @@ -1281,7 +1281,6 @@ public static ColStatistics 
getColStatisticsFromExpression(HiveConf conf, Statis colName = engfd.getName(); colType = engfd.getTypeString(); countDistincts = getNDVFor(engfd, numRows, parentStats); - oi = engfd.getWritableObjectInspector(); } else if (end instanceof ExprNodeColumnListDesc) { // column list @@ -1289,7 +1288,6 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis colName = Joiner.on(",").join(encd.getCols()); colType = serdeConstants.LIST_TYPE_NAME; countDistincts = numRows; - oi = encd.getWritableObjectInspector(); } else if (end instanceof ExprNodeFieldDesc) { // field within complex type @@ -1297,25 +1295,12 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis colName = enfd.getFieldName(); colType = enfd.getTypeString(); countDistincts = numRows; - oi = enfd.getWritableObjectInspector(); } else { throw new IllegalArgumentException("not supported expr type " + end.getClass()); } colType = colType.toLowerCase(); - if (colType.equals(serdeConstants.STRING_TYPE_NAME) - || colType.equals(serdeConstants.BINARY_TYPE_NAME) - || colType.startsWith(serdeConstants.VARCHAR_TYPE_NAME) - || colType.startsWith(serdeConstants.CHAR_TYPE_NAME) - || colType.startsWith(serdeConstants.LIST_TYPE_NAME) - || colType.startsWith(serdeConstants.MAP_TYPE_NAME) - || colType.startsWith(serdeConstants.STRUCT_TYPE_NAME) - || colType.startsWith(serdeConstants.UNION_TYPE_NAME)) { - avgColSize = getAvgColLenOfVariableLengthTypes(conf, oi, colType); - } else { - avgColSize = getAvgColLenOfFixedLengthTypes(colType); - } - + avgColSize = getAvgColLenOf(conf, oi, colType); ColStatistics colStats = new ColStatistics(colName, colType); colStats.setAvgColLen(avgColSize); colStats.setCountDistint(countDistincts); @@ -1456,7 +1441,7 @@ public static long getDataSizeFromColumnStats(long numRows, List for (ColStatistics cs : colStats) { if (cs != null) { String colTypeLowerCase = cs.getColumnType().toLowerCase(); - long nonNullCount = numRows - cs.getNumNulls(); + long nonNullCount = cs.getNumNulls() > 0 ? numRows - cs.getNumNulls() + 1 : numRows; double sizeOf = 0; if (colTypeLowerCase.equals(serdeConstants.TINYINT_TYPE_NAME) || colTypeLowerCase.equals(serdeConstants.SMALLINT_TYPE_NAME) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 6799978..5ad5c06 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -28,7 +28,9 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AbstractAggregationBuffer; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationType; import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -358,10 +360,16 @@ protected HiveDecimalWritable getNextResult( } } - private static class AverageAggregationBuffer implements AggregationBuffer { + @AggregationType(estimable = true) + private static class AverageAggregationBuffer extends AbstractAggregationBuffer { private HashSet uniqueObjects; // Unique rows. 
private long count; private TYPE sum; + + @Override + public int estimate() { + return 2*JavaDataModel.PRIMITIVES2; + } }; @SuppressWarnings("unchecked") diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java index 43b23fa..763bfd5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; import org.apache.hadoop.hive.ql.udf.UDFType; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationType; import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -79,8 +80,13 @@ public ObjectInspector init(Mode m, ObjectInspector[] parameters) } /** class for storing the current max value */ + @AggregationType(estimable = true) static class MaxAgg extends AbstractAggregationBuffer { Object o; + @Override + public int estimate() { + return JavaDataModel.PRIMITIVES2; + } } @Override @@ -138,7 +144,7 @@ public GenericUDAFEvaluator getWindowingEvaluator(WindowFrameDef wFrmDef) { /* * Based on the Paper by Daniel Lemire: Streaming Max-Min filter using no more * than 3 comparisons per elem. - * + * * 1. His algorithm works on fixed size windows up to the current row. For row * 'i' and window 'w' it computes the min/max for window (i-w, i). 2. The core * idea is to keep a queue of (max, idx) tuples. A tuple in the queue @@ -150,7 +156,7 @@ public GenericUDAFEvaluator getWindowingEvaluator(WindowFrameDef wFrmDef) { * element at the front of the queue has reached its max range of influence; * i.e. frontTuple.idx + w > i. If yes we can remove it from the queue. - on * the ith step o/p the front of the queue as the max for the ith entry. - * + * * Here we modify the algorithm: 1. to handle window's that are of the form * (i-p, i+f), where p is numPreceding,f = numFollowing - we start outputing * rows only after receiving f rows. 
- the formula for 'influence range' of an @@ -192,6 +198,7 @@ public int estimate() { + (3 * JavaDataModel.PRIMITIVES1); } + @Override protected void reset() { maxChain.clear(); super.reset(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java index 70e0db1..132bad6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java @@ -26,7 +26,9 @@ import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; import org.apache.hadoop.hive.ql.udf.UDFType; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationType; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax.MaxStreamingFixedWindow; +import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.FullMapEqualComparer; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -76,8 +78,13 @@ public ObjectInspector init(Mode m, ObjectInspector[] parameters) } /** class for storing the current max value */ + @AggregationType(estimable = true) static class MinAgg extends AbstractAggregationBuffer { Object o; + @Override + public int estimate() { + return JavaDataModel.PRIMITIVES2; + } } @Override @@ -139,14 +146,17 @@ public MinStreamingFixedWindow(GenericUDAFEvaluator wrappedEval, super(wrappedEval, wFrmDef); } + @Override protected ObjectInspector inputOI() { return ((GenericUDAFMinEvaluator) wrappedEval).inputOI; } + @Override protected ObjectInspector outputOI() { return ((GenericUDAFMinEvaluator) wrappedEval).outputOI; } + @Override protected boolean removeLast(Object in, Object last) { return isLess(in, last); } diff --git a/ql/src/test/queries/clientpositive/vector_coalesce.q b/ql/src/test/queries/clientpositive/vector_coalesce.q index b1a7766..cfba7be 100644 --- a/ql/src/test/queries/clientpositive/vector_coalesce.q +++ b/ql/src/test/queries/clientpositive/vector_coalesce.q @@ -1,3 +1,4 @@ +set hive.stats.fetch.column.stats=true; set hive.explain.user=false; SET hive.vectorized.execution.enabled=true; diff --git a/ql/src/test/results/clientnegative/udf_assert_true.q.out b/ql/src/test/results/clientnegative/udf_assert_true.q.out index baa9074..7fc50d6 100644 --- a/ql/src/test/results/clientnegative/udf_assert_true.q.out +++ b/ql/src/test/results/clientnegative/udf_assert_true.q.out @@ -28,13 +28,13 @@ STAGE PLANS: Select Operator expressions: assert_true((_col5 > 0)) (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1000 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -52,13 +52,13 @@ STAGE PLANS: Select Operator expressions: assert_true((_col5 > 0)) (type: void) outputColumnNames: 
_col0 - Statistics: Num rows: 1000 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -105,13 +105,13 @@ STAGE PLANS: Select Operator expressions: assert_true((_col5 < 2)) (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1000 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -129,13 +129,13 @@ STAGE PLANS: Select Operator expressions: assert_true((_col5 < 2)) (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1000 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/acid_table_stats.q.out b/ql/src/test/results/clientpositive/acid_table_stats.q.out index 4d51511..77771fe 100644 --- a/ql/src/test/results/clientpositive/acid_table_stats.q.out +++ b/ql/src/test/results/clientpositive/acid_table_stats.q.out @@ -94,7 +94,7 @@ Partition Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 3837 + totalSize 3852 #### A masked pattern was here #### # Storage Information @@ -132,9 +132,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid - Statistics: Num rows: 1 Data size: 3837 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3852 Basic stats: PARTIAL Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 3837 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3852 Basic stats: PARTIAL Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -211,7 +211,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3837 + totalSize 3852 #### A masked pattern was here #### # Storage Information @@ -260,7 +260,7 @@ Partition Parameters: 
numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3837 + totalSize 3852 #### A masked pattern was here #### # Storage Information @@ -387,7 +387,7 @@ Partition Parameters: numFiles 4 numRows 1000 rawDataSize 208000 - totalSize 7689 + totalSize 7704 #### A masked pattern was here #### # Storage Information @@ -436,7 +436,7 @@ Partition Parameters: numFiles 4 numRows 2000 rawDataSize 416000 - totalSize 7689 + totalSize 7704 #### A masked pattern was here #### # Storage Information @@ -539,20 +539,20 @@ STAGE PLANS: aggregations: max(key) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Group By Operator aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out b/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out index b7a87fd..32644dc 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out @@ -118,12 +118,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: over1k - Statistics: Num rows: 2098 Data size: 16736 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2098 Data size: 16744 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((t = 1) and (si = 2)) or ((t = 2) and (si = 3)) or ((t = 3) and (si = 4)) or ((t = 4) and (si = 5)) or ((t = 5) and (si = 6)) or ((t = 6) and (si = 7)) or ((t = 7) and (si = 8)) or ((t = 9) and (si = 10)) or ((t = 10) and (si = 11)) or ((t = 11) and (si = 12)) or ((t = 12) and (si = 13)) or ((t = 13) and (si = 14)) or ((t = 14) and (si = 15)) or ((t = 15) and (si = 16)) or ((t = 16) and (si = 17)) or ((t = 17) and (si = 18)) or ((t = 27) and (si = 28)) or ((t = 37) and (si = 38)) or ((t = 47) and (si = 48)) or ((t = 52) and (si = 53))) (type: boolean) - Statistics: Num rows: 300 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 300 Data size: 2400 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 300 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 300 Data size: 2400 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash diff --git a/ql/src/test/results/clientpositive/annotate_stats_filter.q.out b/ql/src/test/results/clientpositive/annotate_stats_filter.q.out index a606e30..bd0b3bb 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_filter.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_filter.q.out @@ -141,7 +141,7 @@ STAGE PLANS: 
Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (state = 'OH') (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -181,17 +181,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (state <> 'OH') (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -217,17 +217,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (state <> 'OH') (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -257,17 +257,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: zip is null (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), null (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data 
size: 102 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -295,17 +295,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: zip is null (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), null (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -335,17 +335,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: zip is not null (type: boolean) - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -373,17 +373,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: zip is not null (type: boolean) - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 7 Data size: 702 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -413,11 +413,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 0 rawDataSize: 0 @@ -436,7 +436,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -476,11 +476,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 8 rawDataSize: 804 @@ -499,17 +499,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: 'foo' (type: string) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -537,11 +537,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 0 rawDataSize: 0 @@ -560,7 +560,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - 
Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -598,7 +598,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -636,7 +636,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -676,7 +676,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((state = 'OH') or (state = 'CA')) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -716,7 +716,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -754,7 +754,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -794,7 +794,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((year = 2001) and year is null) or (state = 'CA')) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -834,7 +834,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((year = 2001) or year is null) and (state = 'CA')) (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -876,17 +876,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid < 30) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: 
COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -912,7 +912,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid > 30) (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -948,17 +948,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid <= 30) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -984,7 +984,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid >= 30) (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -1024,7 +1024,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid < 3) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -1060,7 +1060,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid > 3) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -1096,7 +1096,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic 
stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid <= 3) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -1132,7 +1132,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid >= 3) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out b/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out index 3070407..f6971a0 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out @@ -248,21 +248,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: year (type: int) outputColumnNames: year - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: year (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: int) @@ -682,11 +682,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: year (type: int) outputColumnNames: year - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: year (type: int) mode: hash diff --git a/ql/src/test/results/clientpositive/annotate_stats_join.q.out b/ql/src/test/results/clientpositive/annotate_stats_join.q.out index 4398f1b..0c21c66 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_join.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_join.q.out @@ -560,19 +560,19 @@ STAGE PLANS: value expressions: _col1 (type: string) TableScan alias: l - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: locid is not null (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, 
_col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col2 (type: bigint), _col3 (type: int) Reduce Operator Tree: Join Operator @@ -648,19 +648,19 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: l - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (state is not null and locid is not null) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: int) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: bigint), _col3 (type: int) Reduce Operator Tree: Join Operator diff --git a/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out b/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out index 64a57fe..224f1ff 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out @@ -289,19 +289,19 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: ss - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ss_store_sk is not null (type: boolean) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -354,19 +354,19 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: ss - Statistics: 
Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (ss_store_sk > 0) (type: boolean) - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -419,19 +419,19 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: PARTIAL TableScan alias: ss - Statistics: Num rows: 1000 Data size: 7668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 7676 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((ss_quantity > 10) and ss_store_sk is not null) (type: boolean) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -484,19 +484,19 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: ss - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ss_store_sk is not null (type: boolean) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -549,19 +549,19 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: ss - Statistics: Num rows: 1000 Data 
size: 7668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 7676 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((ss_quantity > 10) and ss_store_sk is not null) (type: boolean) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -599,19 +599,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ss_store_sk is not null (type: boolean) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: s Statistics: Num rows: 12 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE @@ -685,7 +685,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (ss_store_sk > 1000) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE @@ -771,19 +771,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss - Statistics: Num rows: 1000 Data size: 3856 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 3860 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ss_store_sk is not null (type: boolean) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 964 Data size: 3716 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: 
Num rows: 964 Data size: 3720 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: s Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE @@ -857,19 +857,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss - Statistics: Num rows: 1000 Data size: 7668 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 7676 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((ss_quantity > 10) and ss_store_sk is not null) (type: boolean) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_store_sk (type: int) outputColumnNames: _col0 - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 321 Data size: 2460 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 321 Data size: 2468 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: s Statistics: Num rows: 12 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE @@ -944,19 +944,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss - Statistics: Num rows: 1000 Data size: 7656 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000 Data size: 7664 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (ss_store_sk is not null and ss_addr_sk is not null) (type: boolean) - Statistics: Num rows: 916 Data size: 7012 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 916 Data size: 7020 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ss_addr_sk (type: int), ss_store_sk (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 916 Data size: 7012 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 916 Data size: 7020 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 916 Data size: 7012 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 916 Data size: 7020 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) TableScan alias: s diff --git a/ql/src/test/results/clientpositive/annotate_stats_limit.q.out b/ql/src/test/results/clientpositive/annotate_stats_limit.q.out index 3c4109b..2610eb1 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_limit.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_limit.q.out @@ -76,11 +76,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 4 rawDataSize: 396 @@ -99,14 +99,14 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num 
rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 4 - Statistics: Num rows: 4 Data size: 396 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 4 Data size: 408 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- greater than the available number of rows @@ -127,14 +127,14 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 16 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 0 rawDataSize: 0 @@ -153,11 +153,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 0 Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/annotate_stats_select.q.out b/ql/src/test/results/clientpositive/annotate_stats_select.q.out index 03944fa..c51b895 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_select.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_select.q.out @@ -284,7 +284,7 @@ STAGE PLANS: Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 2 rawDataSize: 8 @@ -510,7 +510,7 @@ STAGE PLANS: Select Operator expressions: null (type: date) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 2 rawDataSize: 224 diff --git a/ql/src/test/results/clientpositive/annotate_stats_union.q.out b/ql/src/test/results/clientpositive/annotate_stats_union.q.out index c49083b..c03aa84 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_union.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_union.q.out @@ -152,11 +152,11 @@ STAGE PLANS: 
Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- numRows: 16 rawDataSize: 1592 @@ -175,32 +175,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Union - Statistics: Num rows: 16 Data size: 1608 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 16 Data size: 1632 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 16 Data size: 1608 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 16 Data size: 1632 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Union - Statistics: Num rows: 16 Data size: 1608 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 16 Data size: 1632 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 16 Data size: 1608 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 16 Data size: 1632 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out index bf4e0bb..c7b9b4f 100644 --- a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out +++ b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out @@ -118,7 +118,7 @@ STAGE PLANS: aggregations: compute_stats(a, 16), compute_stats(b, 16) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 968 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -152,17 +152,17 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - 
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 968 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 972 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 972 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/autoColumnStats_7.q.out b/ql/src/test/results/clientpositive/autoColumnStats_7.q.out index 9422d65..2b3bbce 100644 --- a/ql/src/test/results/clientpositive/autoColumnStats_7.q.out +++ b/ql/src/test/results/clientpositive/autoColumnStats_7.q.out @@ -137,7 +137,7 @@ STAGE PLANS: aggregations: compute_stats(VALUE._col0, 16), compute_stats(VALUE._col2, 16), compute_stats(VALUE._col3, 16) mode: partial1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1460 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -151,17 +151,17 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1460 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2) mode: final outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/autoColumnStats_9.q.out b/ql/src/test/results/clientpositive/autoColumnStats_9.q.out index da8b19c..f70aa58 100644 --- a/ql/src/test/results/clientpositive/autoColumnStats_9.q.out +++ b/ql/src/test/results/clientpositive/autoColumnStats_9.q.out @@ -93,7 +93,7 @@ STAGE PLANS: aggregations: compute_stats(key, 16), compute_stats(value, 16) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 968 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -149,7 +149,7 @@ STAGE PLANS: aggregations: compute_stats(key, 16), compute_stats(value, 16) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 968 Basic stats: COMPLETE Column stats: NONE File 
Output Operator compressed: false table: @@ -185,17 +185,17 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 968 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 972 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 972 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/cbo_rp_annotate_stats_groupby.q.out b/ql/src/test/results/clientpositive/cbo_rp_annotate_stats_groupby.q.out index e19bb9e..f5b4375 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_annotate_stats_groupby.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_annotate_stats_groupby.q.out @@ -248,21 +248,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: year (type: int) outputColumnNames: year - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: year (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: int) @@ -700,11 +700,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: year (type: int) outputColumnNames: year - Statistics: Num rows: 8 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: year (type: int) mode: hash diff --git a/ql/src/test/results/clientpositive/cbo_rp_auto_join0.q.out b/ql/src/test/results/clientpositive/cbo_rp_auto_join0.q.out index adcd19d..942e447 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_auto_join0.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_auto_join0.q.out @@ -38,14 +38,14 @@ STAGE PLANS: a:cbo_t1:cbo_t3 TableScan alias: cbo_t3 - Statistics: Num rows: 20 Data size: 3060 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 3230 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: 
(key < 10) (type: boolean) - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE HashTable Sink Operator keys: 0 @@ -56,14 +56,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cbo_t3 - Statistics: Num rows: 20 Data size: 3060 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 3230 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (key < 10) (type: boolean) - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: Inner Join 0 to 1 @@ -171,14 +171,14 @@ STAGE PLANS: a:cbo_t1:cbo_t3 TableScan alias: cbo_t3 - Statistics: Num rows: 20 Data size: 3060 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 3230 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (key < 10) (type: boolean) - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE HashTable Sink Operator keys: 0 @@ -189,14 +189,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cbo_t3 - Statistics: Num rows: 20 Data size: 3060 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 3230 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (key < 10) (type: boolean) - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 6 Data size: 1020 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: Inner Join 0 to 1 diff --git a/ql/src/test/results/clientpositive/cbo_rp_groupby3_noskew_multi_distinct.q.out b/ql/src/test/results/clientpositive/cbo_rp_groupby3_noskew_multi_distinct.q.out index 95233b0..c09764c 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_groupby3_noskew_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_groupby3_noskew_multi_distinct.q.out @@ -61,14 +61,14 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), 
var_samp(KEY._col0:0._col0), sum(DISTINCT KEY._col0:1._col0), count(DISTINCT KEY._col0:2._col0) mode: complete outputColumnNames: $f0, $f1, $f2, $f3, $f4, $f5, $f6, $f7, $f8, $f9, $f10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: $f0 (type: double), $f1 (type: double), $f2 (type: double), UDFToDouble($f3) (type: double), UDFToDouble($f4) (type: double), $f5 (type: double), $f6 (type: double), $f7 (type: double), $f8 (type: double), $f9 (type: double), UDFToDouble($f10) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/cbo_rp_join0.q.out b/ql/src/test/results/clientpositive/cbo_rp_join0.q.out index e807f30..149383a 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_join0.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_join0.q.out @@ -20,48 +20,48 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cbo_t1 - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), c_int (type: int) outputColumnNames: key, c_int - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE value expressions: c_int (type: int) TableScan alias: cbo_t2:cbo_t2 - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), c_int (type: int) outputColumnNames: key, c_int - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 
18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE value expressions: c_int (type: int) TableScan alias: cbo_t3:cbo_t3 - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -669,60 +669,60 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cbo_t1 - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), c_int (type: int) outputColumnNames: key, c_int - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE value expressions: c_int (type: int) TableScan alias: cbo_t2:cbo_t2 - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), c_int (type: int) outputColumnNames: key, c_int - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 18 Data size: 1424 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 1513 Basic stats: COMPLETE Column stats: COMPLETE value expressions: c_int (type: int) TableScan alias: cbo_t3:cbo_t3 - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key 
(type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 20 Data size: 1530 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1615 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: cbo_t4:cbo_t1 - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: key (type: string), c_int (type: int) outputColumnNames: key, c_int - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 20 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 20 Data size: 1691 Basic stats: COMPLETE Column stats: COMPLETE value expressions: c_int (type: int) Reduce Operator Tree: Join Operator diff --git a/ql/src/test/results/clientpositive/cbo_rp_udaf_percentile_approx_23.q.out b/ql/src/test/results/clientpositive/cbo_rp_udaf_percentile_approx_23.q.out index 8b8df12..289e3e7 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_udaf_percentile_approx_23.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_udaf_percentile_approx_23.q.out @@ -531,20 +531,20 @@ STAGE PLANS: aggregations: percentile_approx($f0, 0.5) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: percentile_approx(VALUE._col0) mode: mergepartial outputColumnNames: $f0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -592,20 +592,20 @@ STAGE PLANS: aggregations: percentile_approx($f0, 0.5) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: percentile_approx(VALUE._col0) mode: mergepartial outputColumnNames: $f0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 
168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/constantfolding.q.out b/ql/src/test/results/clientpositive/constantfolding.q.out index 1e86127..c5962db 100644 --- a/ql/src/test/results/clientpositive/constantfolding.q.out +++ b/ql/src/test/results/clientpositive/constantfolding.q.out @@ -223,7 +223,7 @@ STAGE PLANS: Select Operator expressions: null (type: date) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE @@ -277,7 +277,7 @@ STAGE PLANS: Select Operator expressions: 1.098612288668 (type: double), null (type: double), null (type: double), 1.098612288668 (type: double), null (type: double), null (type: double), 1.584962500721 (type: double), null (type: double), null (type: double), 0.47712125472 (type: double), null (type: double), null (type: double), 1.584962500721 (type: double), null (type: double), null (type: double), null (type: double), -1.0 (type: double), 7.389056098931 (type: double), 8.0 (type: double), 8.0 (type: double), 0.125 (type: double), 8.0 (type: double), 2.0 (type: double), NaN (type: double), 1.0 (type: double), 1.0 (type: double), 8.0 (type: double), 8.0 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27 - Statistics: Num rows: 1 Data size: 136 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0), diff --git a/ql/src/test/results/clientpositive/create_genericudaf.q.out b/ql/src/test/results/clientpositive/create_genericudaf.q.out index 618fe0e..dd2ce7f 100644 --- a/ql/src/test/results/clientpositive/create_genericudaf.q.out +++ b/ql/src/test/results/clientpositive/create_genericudaf.q.out @@ -47,20 +47,20 @@ STAGE PLANS: aggregations: test_avg(1), test_avg(_col1) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct) Reduce Operator Tree: Group By Operator aggregations: test_avg(VALUE._col0), test_avg(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git 
a/ql/src/test/results/clientpositive/decimal_precision.q.out b/ql/src/test/results/clientpositive/decimal_precision.q.out index 23eda1c..cb17e0d 100644 --- a/ql/src/test/results/clientpositive/decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/decimal_precision.q.out @@ -545,20 +545,20 @@ STAGE PLANS: aggregations: avg(dec), sum(dec) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) Reduce Operator Tree: Group By Operator aggregations: avg(VALUE._col0), sum(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/decimal_stats.q.out b/ql/src/test/results/clientpositive/decimal_stats.q.out index 6bcf3fa..5af58fb 100644 --- a/ql/src/test/results/clientpositive/decimal_stats.q.out +++ b/ql/src/test/results/clientpositive/decimal_stats.q.out @@ -63,27 +63,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_1 - Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 112112 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: t (type: decimal(4,2)), u (type: decimal(5,0)), v (type: decimal(10,0)) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 112112 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 112112 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: decimal(4,2)), _col1 (type: decimal(5,0)), _col2 (type: decimal(10,0)) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: decimal(4,2)), VALUE._col1 (type: decimal(5,0)), VALUE._col2 (type: decimal(10,0)) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 500 Data size: 112000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 112112 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 100 - Statistics: Num rows: 100 Data size: 22400 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 100 Data size: 22512 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 100 Data size: 22400 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 100 Data size: 22512 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/decimal_udf.q.out b/ql/src/test/results/clientpositive/decimal_udf.q.out index 0a7f310..47d2103 100644 --- a/ql/src/test/results/clientpositive/decimal_udf.q.out +++ b/ql/src/test/results/clientpositive/decimal_udf.q.out @@ -2042,20 +2042,20 @@ STAGE PLANS: aggregations: histogram_numeric(_col0, 3) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: histogram_numeric(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/fetch_aggregation.q.out b/ql/src/test/results/clientpositive/fetch_aggregation.q.out index 91f47f8..f20320f 100644 --- a/ql/src/test/results/clientpositive/fetch_aggregation.q.out +++ b/ql/src/test/results/clientpositive/fetch_aggregation.q.out @@ -23,7 +23,7 @@ STAGE PLANS: aggregations: count(key), sum(key), avg(key), min(key), max(key), std(key), variance(key) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 800 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -39,7 +39,7 @@ STAGE PLANS: aggregations: count(_col0), sum(_col1), avg(_col2), min(_col3), max(_col4), std(_col5), variance(_col6) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 800 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: select count(key),sum(key),avg(key),min(key),max(key),std(key),variance(key) from src diff --git a/ql/src/test/results/clientpositive/fold_case.q.out b/ql/src/test/results/clientpositive/fold_case.q.out index ec99197..acf1e4c 100644 --- a/ql/src/test/results/clientpositive/fold_case.q.out +++ b/ql/src/test/results/clientpositive/fold_case.q.out @@ -415,10 +415,10 @@ STAGE PLANS: Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -503,6 +503,6 @@ STAGE PLANS: Select Operator expressions: null (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE ListSink diff --git a/ql/src/test/results/clientpositive/groupby3.q.out b/ql/src/test/results/clientpositive/groupby3.q.out index 4247d28..2ebeae4 100644 --- a/ql/src/test/results/clientpositive/groupby3.q.out +++ b/ql/src/test/results/clientpositive/groupby3.q.out @@ -59,7 +59,7 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: partial1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -73,21 +73,21 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: string), _col4 (type: string), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct) Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: final outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/groupby3_map.q.out b/ql/src/test/results/clientpositive/groupby3_map.q.out index 405daa9..07c122e 100644 --- a/ql/src/test/results/clientpositive/groupby3_map.q.out +++ b/ql/src/test/results/clientpositive/groupby3_map.q.out @@ -64,14 +64,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, 
_col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out index a1d24e8..a4501f7 100644 --- a/ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out @@ -68,14 +68,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8), sum(DISTINCT KEY._col0:1._col0), count(DISTINCT KEY._col0:2._col0) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), UDFToDouble(_col10) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/groupby3_map_skew.q.out b/ql/src/test/results/clientpositive/groupby3_map_skew.q.out index 5679770..e02bdeb 100644 --- a/ql/src/test/results/clientpositive/groupby3_map_skew.q.out +++ b/ql/src/test/results/clientpositive/groupby3_map_skew.q.out @@ -66,7 +66,7 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: partials outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 184 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -80,21 +80,21 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: string), _col4 (type: string), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct) Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: final outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/groupby3_noskew.q.out b/ql/src/test/results/clientpositive/groupby3_noskew.q.out index e6c13cf..624fd2d 100644 --- a/ql/src/test/results/clientpositive/groupby3_noskew.q.out +++ b/ql/src/test/results/clientpositive/groupby3_noskew.q.out @@ -57,14 +57,14 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out index f6be869..a1d403d 100644 --- a/ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out @@ -61,14 +61,14 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0), sum(DISTINCT KEY._col0:1._col0), count(DISTINCT KEY._col0:2._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), UDFToDouble(_col10) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/interval_arithmetic.q.out b/ql/src/test/results/clientpositive/interval_arithmetic.q.out index 27db395..72e602c 100644 --- a/ql/src/test/results/clientpositive/interval_arithmetic.q.out +++ b/ql/src/test/results/clientpositive/interval_arithmetic.q.out @@ -288,10 +288,10 @@ STAGE PLANS: Select Operator expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12288 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 196608 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: select @@ -586,10 +586,10 @@ STAGE PLANS: Select Operator expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12288 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 294912 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: select diff --git a/ql/src/test/results/clientpositive/literal_decimal.q.out b/ql/src/test/results/clientpositive/literal_decimal.q.out 
index 0b6299b..6f686eb 100644 --- a/ql/src/test/results/clientpositive/literal_decimal.q.out +++ b/ql/src/test/results/clientpositive/literal_decimal.q.out @@ -16,10 +16,10 @@ STAGE PLANS: Select Operator expressions: -1 (type: int), 0 (type: int), 1 (type: int), 3.14 (type: decimal(3,2)), -3.14 (type: decimal(3,2)), 99999999999999999 (type: bigint), 99999999999999999.9999999999999 (type: decimal(30,13)), null (type: void) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 500 Data size: 178000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 178004 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 1 - Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: SELECT -1BD, 0BD, 1BD, 3.14BD, -3.14BD, 99999999999999999BD, 99999999999999999.9999999999999BD, 1E99BD FROM src LIMIT 1 diff --git a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out index e75bec6..2dc3b5a 100644 --- a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out @@ -3333,10 +3333,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3353,10 +3353,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3401,34 +3401,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - 
Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: llap @@ -3437,34 +3437,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3541,10 +3541,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3561,10 +3561,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: 
NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3611,34 +3611,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: llap @@ -3647,34 +3647,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - 
Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3754,10 +3754,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3792,10 +3792,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3806,49 +3806,49 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE 
Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Reducer 2 Execution mode: llap @@ -3901,49 +3901,49 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Union 3 Vertex: Union 3 @@ -5717,10 +5717,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -5737,10 +5737,10 @@ STAGE PLANS: aggregations: min(ds) 
mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -5766,34 +5766,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 7 Execution mode: llap @@ -5802,34 +5802,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 
Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 5 Vertex: Union 5 diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index 2c95a6d..043a7c2 100644 --- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -3088,10 +3088,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3108,10 +3108,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3156,34 +3156,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: 
COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: vectorized, llap @@ -3192,34 +3192,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3296,10 +3296,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3316,10 +3316,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3366,34 +3366,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter 
Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: vectorized, llap @@ -3402,34 +3402,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3509,10 +3509,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3547,10 +3547,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -3561,49 +3561,49 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Reducer 2 Execution mode: vectorized, llap 
@@ -3656,49 +3656,49 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Union 3 Vertex: Union 3 @@ -5347,10 +5347,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -5367,10 +5367,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data 
size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: llap LLAP IO: no inputs @@ -5396,34 +5396,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 7 Execution mode: vectorized, llap @@ -5432,34 +5432,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: 
ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 5 Vertex: Union 5 diff --git a/ql/src/test/results/clientpositive/metadataonly1.q.out b/ql/src/test/results/clientpositive/metadataonly1.q.out index 4b4c57c..ece4ba0 100644 --- a/ql/src/test/results/clientpositive/metadataonly1.q.out +++ b/ql/src/test/results/clientpositive/metadataonly1.q.out @@ -30,11 +30,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -44,13 +44,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -114,11 +114,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -177,13 +177,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -511,11 +511,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -620,11 +620,11 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: 
mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator isSamplingPred: false predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -669,7 +669,7 @@ STAGE PLANS: null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: 1 auto parallelism: false Path -> Alias: @@ -1421,11 +1421,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -1530,13 +1530,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat diff --git a/ql/src/test/results/clientpositive/num_op_type_conv.q.out b/ql/src/test/results/clientpositive/num_op_type_conv.q.out index 013a153..00a4ca9 100644 --- a/ql/src/test/results/clientpositive/num_op_type_conv.q.out +++ b/ql/src/test/results/clientpositive/num_op_type_conv.q.out @@ -22,10 +22,10 @@ STAGE PLANS: Select Operator expressions: null (type: double), null (type: double), null (type: double), 1 (type: bigint), 0 (type: bigint), 0.0 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 500 Data size: 12000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 12024 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 1 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: SELECT null + 7, 1.0 - null, null + null, diff --git a/ql/src/test/results/clientpositive/perf/query13.q.out b/ql/src/test/results/clientpositive/perf/query13.q.out index 1b63a17..33f30b5 100644 --- a/ql/src/test/results/clientpositive/perf/query13.q.out +++ b/ql/src/test/results/clientpositive/perf/query13.q.out @@ -122,11 +122,11 @@ Stage-0 Stage-1 Reducer 3 File Output Operator [FS_43] - Group By Operator [GBY_41] (rows=1 width=344) + Group By Operator [GBY_41] (rows=1 width=764) 
Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(VALUE._col0)","avg(VALUE._col1)","avg(VALUE._col2)","sum(VALUE._col3)"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_40] - Group By Operator [GBY_39] (rows=1 width=112) + Group By Operator [GBY_39] (rows=1 width=764) Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(_col7)","avg(_col9)","avg(_col10)","sum(_col10)"] Merge Join Operator [MERGEJOIN_74] (rows=130450332 width=88) Conds:RS_35._col0=RS_36._col0(Inner),Output:["_col7","_col9","_col10"] diff --git a/ql/src/test/results/clientpositive/perf/query28.q.out b/ql/src/test/results/clientpositive/perf/query28.q.out index d7aca74..8a0f038 100644 --- a/ql/src/test/results/clientpositive/perf/query28.q.out +++ b/ql/src/test/results/clientpositive/perf/query28.q.out @@ -118,15 +118,15 @@ Stage-0 Stage-1 Reducer 3 File Output Operator [FS_51] - Limit [LIM_50] (rows=1 width=817) + Limit [LIM_50] (rows=1 width=2497) Number of rows:100 - Select Operator [SEL_49] (rows=1 width=817) + Select Operator [SEL_49] (rows=1 width=2497) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17"] - Merge Join Operator [MERGEJOIN_58] (rows=1 width=817) + Merge Join Operator [MERGEJOIN_58] (rows=1 width=2497) Conds:(Inner),(Inner),(Inner),(Inner),(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17"] <-Reducer 11 [SIMPLE_EDGE] SHUFFLE [RS_46] - Group By Operator [GBY_33] (rows=1 width=136) + Group By Operator [GBY_33] (rows=1 width=416) Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 10 [SIMPLE_EDGE] SHUFFLE [RS_32] @@ -140,7 +140,7 @@ Stage-0 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"] <-Reducer 13 [SIMPLE_EDGE] SHUFFLE [RS_47] - Group By Operator [GBY_40] (rows=1 width=136) + Group By Operator [GBY_40] (rows=1 width=416) Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 12 [SIMPLE_EDGE] SHUFFLE [RS_39] @@ -154,7 +154,7 @@ Stage-0 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_42] - Group By Operator [GBY_5] (rows=1 width=136) + Group By Operator [GBY_5] (rows=1 width=416) Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_4] @@ -168,7 +168,7 @@ Stage-0 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"] <-Reducer 5 [SIMPLE_EDGE] SHUFFLE [RS_43] - Group By Operator [GBY_12] (rows=1 width=136) + Group By Operator [GBY_12] (rows=1 width=416) Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_11] @@ -182,7 +182,7 @@ Stage-0 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"] <-Reducer 7 [SIMPLE_EDGE] SHUFFLE [RS_44] - Group By Operator [GBY_19] (rows=1 width=136) + Group By Operator [GBY_19] (rows=1 width=416) 
Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 6 [SIMPLE_EDGE] SHUFFLE [RS_18] @@ -196,7 +196,7 @@ Stage-0 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"] <-Reducer 9 [SIMPLE_EDGE] SHUFFLE [RS_45] - Group By Operator [GBY_26] (rows=1 width=136) + Group By Operator [GBY_26] (rows=1 width=416) Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"] <-Map 8 [SIMPLE_EDGE] SHUFFLE [RS_25] diff --git a/ql/src/test/results/clientpositive/reduceSinkDeDuplication_pRS_key_empty.q.out b/ql/src/test/results/clientpositive/reduceSinkDeDuplication_pRS_key_empty.q.out index 4a848f2..3ad09e8 100644 --- a/ql/src/test/results/clientpositive/reduceSinkDeDuplication_pRS_key_empty.q.out +++ b/ql/src/test/results/clientpositive/reduceSinkDeDuplication_pRS_key_empty.q.out @@ -67,15 +67,15 @@ STAGE PLANS: aggregations: avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: compute_stats(_col0, 16), compute_stats(_col1, 16), compute_stats(_col2, 16), compute_stats(_col3, 16) mode: complete outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2004 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2004 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -157,7 +157,7 @@ STAGE PLANS: aggregations: avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -171,17 +171,17 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: string), _col2 (type: double), _col3 (type: double) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0, 16), compute_stats(VALUE._col2, 16), compute_stats(VALUE._col3, 16), compute_stats(VALUE._col4, 16) mode: complete outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2004 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2004 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/remove_exprs_stats.q.out b/ql/src/test/results/clientpositive/remove_exprs_stats.q.out index e29fb4e..ae2aa37 100644 --- a/ql/src/test/results/clientpositive/remove_exprs_stats.q.out +++ b/ql/src/test/results/clientpositive/remove_exprs_stats.q.out @@ -76,11 +76,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- always false @@ -99,7 +99,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -137,11 +137,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- always false @@ -160,7 +160,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -198,7 +198,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid < 6) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -236,7 +236,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -274,11 +274,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 
Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- nothing to do @@ -297,7 +297,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid >= 6) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -335,7 +335,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE @@ -373,7 +373,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid > 1) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -411,7 +411,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid <= 1) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE @@ -449,11 +449,11 @@ STAGE PLANS: Processor Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- 5 should stay @@ -472,17 +472,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid) IN (5) (type: boolean) - Statistics: Num rows: 5 Data size: 498 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 498 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 498 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -510,17 +510,17 @@ STAGE PLANS: Map 
Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid) IN (5, 2, 3) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -548,17 +548,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (locid) IN (1, 6) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -586,7 +586,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: loc_orc - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: false (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE diff --git a/ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out b/ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out index 30d10f7..bf9998e 100644 --- a/ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out +++ b/ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out @@ -611,19 +611,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: l - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: locid is not null (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip 
(type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col2 (type: bigint), _col3 (type: int) Reducer 2 Reduce Operator Tree: @@ -709,19 +709,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: l - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (state is not null and locid is not null) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: int) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: int) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8 Data size: 816 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: bigint), _col3 (type: int) Reducer 2 Reduce Operator Tree: diff --git a/ql/src/test/results/clientpositive/spark/groupby3.q.out b/ql/src/test/results/clientpositive/spark/groupby3.q.out index e48018c..23871ba 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3.q.out @@ -65,10 +65,10 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: partial1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: string), _col4 (type: string), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct) Reducer 3 Reduce Operator Tree: @@ -76,14 +76,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: final outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/groupby3_map.q.out b/ql/src/test/results/clientpositive/spark/groupby3_map.q.out index f806303..71f8dc0 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3_map.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3_map.q.out @@ -70,14 +70,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1216 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/groupby3_map_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby3_map_multi_distinct.q.out index 3b31dfe..47ef5cb 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3_map_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3_map_multi_distinct.q.out @@ -74,14 +74,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8), sum(DISTINCT KEY._col0:1._col0), count(DISTINCT KEY._col0:2._col0) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE 
Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), UDFToDouble(_col10) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1232 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out b/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out index bbad6e7..7cfca81 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out @@ -72,10 +72,10 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: partials outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: string), _col4 (type: string), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct) Reducer 3 Reduce Operator Tree: @@ -83,14 +83,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: final outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1464 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git 
a/ql/src/test/results/clientpositive/spark/groupby3_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby3_noskew.q.out index 6868eff..b2993a6 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3_noskew.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3_noskew.q.out @@ -63,14 +63,14 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/groupby3_noskew_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby3_noskew_multi_distinct.q.out index 399bfd8..d152a07 100644 --- a/ql/src/test/results/clientpositive/spark/groupby3_noskew_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby3_noskew_multi_distinct.q.out @@ -67,14 +67,14 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0), sum(DISTINCT KEY._col0:1._col0), count(DISTINCT KEY._col0:2._col0) mode: complete outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), UDFToDouble(_col10) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/subquery_in.q.out b/ql/src/test/results/clientpositive/spark/subquery_in.q.out index f290c02..bfade76 100644 --- a/ql/src/test/results/clientpositive/spark/subquery_in.q.out +++ b/ql/src/test/results/clientpositive/spark/subquery_in.q.out @@ -354,10 +354,10 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Reducer 5 Reduce Operator Tree: @@ -365,20 +365,20 @@ STAGE PLANS: aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 Fetch Operator diff --git a/ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out b/ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out index 0a6dc87..dac8247 100644 --- a/ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out +++ b/ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out @@ -299,10 +299,10 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct) Reducer 3 Reduce Operator Tree: @@ -329,10 +329,10 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct) Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/spark/vectorization_0.q.out b/ql/src/test/results/clientpositive/spark/vectorization_0.q.out index 7d81739..22fe7cd 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_0.q.out 
+++ b/ql/src/test/results/clientpositive/spark/vectorization_0.q.out @@ -242,10 +242,10 @@ STAGE PLANS: aggregations: avg(ctinyint), variance(ctinyint), var_pop(ctinyint), var_samp(ctinyint), std(ctinyint), stddev(ctinyint), stddev_pop(ctinyint), stddev_samp(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -254,11 +254,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -266,10 +266,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -548,10 +548,10 @@ STAGE PLANS: aggregations: avg(cbigint), variance(cbigint), var_pop(cbigint), var_samp(cbigint), std(cbigint), stddev(cbigint), stddev_pop(cbigint), stddev_samp(cbigint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), 
_col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -560,11 +560,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -572,10 +572,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -854,10 +854,10 @@ STAGE PLANS: aggregations: avg(cfloat), variance(cfloat), var_pop(cfloat), var_samp(cfloat), std(cfloat), stddev(cfloat), stddev_pop(cfloat), stddev_samp(cfloat) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -866,11 +866,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 
(type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -878,10 +878,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1004,10 +1004,10 @@ STAGE PLANS: aggregations: avg(cbigint), stddev_pop(cbigint), var_samp(cbigint), count(), sum(cfloat), min(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: bigint), _col4 (type: double), _col5 (type: tinyint) Execution mode: vectorized Reducer 2 @@ -1016,14 +1016,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), stddev_pop(VALUE._col1), var_samp(VALUE._col2), count(VALUE._col3), sum(VALUE._col4), min(VALUE._col5) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (- _col0) (type: double), (-6432.0 + _col0) (type: double), _col1 (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) + (-6432.0 + _col0)) (type: double), _col2 (type: double), (- (-6432.0 + _col0)) (type: double), (-6432.0 + (- (-6432.0 + _col0))) (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) / (- (-6432.0 + _col0))) (type: double), _col3 (type: bigint), _col4 (type: double), (_col2 % _col1) (type: double), (- _col2) (type: double), ((- (-6432.0 + _col0)) * (- _col0)) (type: double), _col5 (type: tinyint), (- _col5) (type: tinyint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out 
b/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out index ef7df76..59a3be0 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_pushdown.q.out @@ -31,10 +31,10 @@ STAGE PLANS: aggregations: avg(cbigint) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Execution mode: vectorized Reducer 2 @@ -43,10 +43,10 @@ STAGE PLANS: aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 6e443f8..2347731 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -161,10 +161,10 @@ STAGE PLANS: aggregations: avg(cint), sum(cdouble), stddev_pop(cint), stddev_samp(csmallint), var_samp(cint), avg(cfloat), stddev_samp(cint), min(ctinyint), count(csmallint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: tinyint), _col8 (type: bigint) Execution mode: vectorized Reducer 2 @@ -173,14 +173,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), sum(VALUE._col1), stddev_pop(VALUE._col2), stddev_samp(VALUE._col3), var_samp(VALUE._col4), avg(VALUE._col5), stddev_samp(VALUE._col6), min(VALUE._col7), count(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (_col0 + -3728.0) (type: double), (- (_col0 + -3728.0)) (type: double), (- (- (_col0 + -3728.0))) (type: double), ((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) (type: double), _col1 (type: double), (- _col0) (type: double), _col2 (type: double), (((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) * (- (- (_col0 + -3728.0)))) (type: double), 
_col3 (type: double), (- _col2) (type: double), (_col2 - (- (- (_col0 + -3728.0)))) (type: double), ((_col2 - (- (- (_col0 + -3728.0)))) * _col2) (type: double), _col4 (type: double), _col5 (type: double), (10.175 - _col4) (type: double), (- (10.175 - _col4)) (type: double), ((- _col2) / -563.0) (type: double), _col6 (type: double), (- ((- _col2) / -563.0)) (type: double), (_col0 / _col1) (type: double), _col7 (type: tinyint), _col8 (type: bigint), (UDFToDouble(_col7) / ((- _col2) / -563.0)) (type: double), (- (_col0 / _col1)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -372,10 +372,10 @@ STAGE PLANS: aggregations: max(cint), var_pop(cbigint), stddev_pop(csmallint), max(cdouble), avg(ctinyint), min(cint), min(cdouble), stddev_samp(csmallint), var_samp(cint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: struct), _col2 (type: struct), _col3 (type: double), _col4 (type: struct), _col5 (type: int), _col6 (type: double), _col7 (type: struct), _col8 (type: struct) Execution mode: vectorized Reducer 2 @@ -384,14 +384,14 @@ STAGE PLANS: aggregations: max(VALUE._col0), var_pop(VALUE._col1), stddev_pop(VALUE._col2), max(VALUE._col3), avg(VALUE._col4), min(VALUE._col5), min(VALUE._col6), stddev_samp(VALUE._col7), var_samp(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), (UDFToDouble(_col0) / -3728.0) (type: double), (_col0 * -3728) (type: int), _col1 (type: double), (- (_col0 * -3728)) (type: int), _col2 (type: double), (-563 % (_col0 * -3728)) (type: int), (_col1 / _col2) (type: double), (- _col2) (type: double), _col3 (type: double), _col4 (type: double), (_col2 - 10.175) (type: double), _col5 (type: int), (UDFToDouble((_col0 * -3728)) % (_col2 - 10.175)) (type: double), (- _col3) (type: double), _col6 (type: double), (_col3 % -26.28) (type: double), _col7 (type: double), (- (UDFToDouble(_col0) / -3728.0)) (type: double), ((- (_col0 * -3728)) % (-563 % (_col0 * -3728))) (type: int), ((UDFToDouble(_col0) / -3728.0) - _col4) (type: double), (- (_col0 * -3728)) (type: int), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, 
_col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -575,10 +575,10 @@ STAGE PLANS: aggregations: var_pop(cbigint), count(), max(ctinyint), stddev_pop(csmallint), max(cint), stddev_samp(cdouble), count(ctinyint), avg(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: tinyint), _col3 (type: struct), _col4 (type: int), _col5 (type: struct), _col6 (type: bigint), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -587,14 +587,14 @@ STAGE PLANS: aggregations: var_pop(VALUE._col0), count(VALUE._col1), max(VALUE._col2), stddev_pop(VALUE._col3), max(VALUE._col4), stddev_samp(VALUE._col5), count(VALUE._col6), avg(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (- _col0) (type: double), (_col0 - (- _col0)) (type: double), _col1 (type: bigint), (CAST( _col1 AS decimal(19,0)) % 79.553) (type: decimal(5,3)), _col2 (type: tinyint), (UDFToDouble(_col1) - (- _col0)) (type: double), (- (- _col0)) (type: double), (-1.0 % (- _col0)) (type: double), _col1 (type: bigint), (- _col1) (type: bigint), _col3 (type: double), (- (- (- _col0))) (type: double), (762 * (- _col1)) (type: bigint), _col4 (type: int), (UDFToLong(_col2) + (762 * (- _col1))) (type: bigint), ((- _col0) + UDFToDouble(_col4)) (type: double), _col5 (type: double), ((- _col1) % _col1) (type: bigint), _col6 (type: bigint), _col7 (type: double), (-3728 % (UDFToLong(_col2) + (762 * (- _col1)))) (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -757,10 +757,10 @@ STAGE PLANS: aggregations: avg(ctinyint), max(cbigint), stddev_samp(cint), var_pop(cint), var_pop(cbigint), max(cfloat) mode: hash outputColumnNames: _col0, _col1, 
_col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: float) Execution mode: vectorized Reducer 2 @@ -769,14 +769,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), max(VALUE._col1), stddev_samp(VALUE._col2), var_pop(VALUE._col3), var_pop(VALUE._col4), max(VALUE._col5) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (_col0 + 6981.0) (type: double), ((_col0 + 6981.0) + _col0) (type: double), _col1 (type: bigint), (((_col0 + 6981.0) + _col0) / _col0) (type: double), (- (_col0 + 6981.0)) (type: double), _col2 (type: double), (_col0 % (- (_col0 + 6981.0))) (type: double), _col3 (type: double), _col4 (type: double), (- _col1) (type: bigint), (UDFToDouble((- _col1)) / _col2) (type: double), _col5 (type: float), (_col4 * -26.28) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out index 9f98c4f..e56a4ff 100644 --- a/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out @@ -76,10 +76,10 @@ STAGE PLANS: aggregations: count(_col0), max(_col1), min(_col0), avg(_col2) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct) Execution mode: vectorized Local Work: @@ -90,10 +90,10 @@ STAGE PLANS: aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out index 9503e6b..4f8fc7a 100644 --- a/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out @@ -77,10 +77,10 @@ STAGE PLANS: aggregations: count(_col0), max(_col1), min(_col0), avg(_col2) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct) Reducer 3 Reduce Operator Tree: @@ -88,11 +88,11 @@ STAGE PLANS: aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: double) Reducer 4 Execution mode: vectorized @@ -100,10 +100,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out index 31ab885..5eb896f 100644 --- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out @@ -821,10 +821,10 @@ STAGE PLANS: aggregations: avg(ctimestamp1), variance(ctimestamp1), var_pop(ctimestamp1), var_samp(ctimestamp1), std(ctimestamp1), stddev(ctimestamp1), stddev_pop(ctimestamp1), stddev_samp(ctimestamp1) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + 
Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -833,14 +833,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: round(_col0, 0) (type: double), _col1 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col2 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col3 BETWEEN 9.20684592523616E19 AND 9.20684592523617E19 (type: boolean), round(_col4, 3) (type: double), round(_col5, 3) (type: double), round(_col6, 3) (type: double), round(_col7, 3) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/stats_list_bucket.q.out b/ql/src/test/results/clientpositive/stats_list_bucket.q.out index c66da97..c34c414 100644 --- a/ql/src/test/results/clientpositive/stats_list_bucket.q.out +++ b/ql/src/test/results/clientpositive/stats_list_bucket.q.out @@ -168,7 +168,7 @@ Stored As SubDirectories: Yes Skewed Columns: [c1, c2] Skewed Values: [[466, val_466], [287, val_287], [82, val_82]] #### A masked pattern was here #### -Skewed Value to Truncated Path: {[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287} +Skewed Value to Truncated Path: {[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82} Storage Desc Params: serialization.format 1 PREHOOK: query: drop table stats_list_bucket diff --git a/ql/src/test/results/clientpositive/subquery_in.q.out b/ql/src/test/results/clientpositive/subquery_in.q.out index abf87d0..27434bd 100644 --- a/ql/src/test/results/clientpositive/subquery_in.q.out +++ b/ql/src/test/results/clientpositive/subquery_in.q.out @@ -297,7 +297,7 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -311,22 +311,22 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value 
expressions: _col0 (type: struct) Reduce Operator Tree: Group By Operator aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -358,7 +358,7 @@ STAGE PLANS: key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.out index 63f93fb..fd35547 100644 --- a/ql/src/test/results/clientpositive/subquery_multiinsert.q.out +++ b/ql/src/test/results/clientpositive/subquery_multiinsert.q.out @@ -527,13 +527,13 @@ STAGE DEPENDENCIES: Stage-17 has a backup stage: Stage-2 Stage-13 depends on stages: Stage-17 Stage-15 depends on stages: Stage-2, Stage-13 - Stage-12 depends on stages: Stage-15 - Stage-0 depends on stages: Stage-12 - Stage-7 depends on stages: Stage-0 - Stage-16 depends on stages: Stage-2, Stage-13 - Stage-4 depends on stages: Stage-16 + Stage-4 depends on stages: Stage-15 Stage-1 depends on stages: Stage-4 Stage-5 depends on stages: Stage-1 + Stage-16 depends on stages: Stage-2, Stage-13 + Stage-12 depends on stages: Stage-16 + Stage-0 depends on stages: Stage-12 + Stage-7 depends on stages: Stage-0 Stage-2 STAGE PLANS: @@ -632,70 +632,6 @@ STAGE PLANS: Stage: Stage-15 Map Reduce Local Work Alias -> Map Local Tables: - sq_1:a - Fetch Operator - limit: -1 - Alias -> Map Local Operator Tree: - sq_1:a - TableScan - alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: ((key > '9') and value is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string), _col1 (type: string) - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - HashTable Sink Operator - keys: - 0 key (type: string), value (type: string) - 1 _col0 (type: string), _col1 (type: string) - - Stage: Stage-12 - Map Reduce - Map Operator Tree: - TableScan - Map Join Operator - condition map: - Left Semi Join 0 to 1 - keys: - 0 key (type: string), value (type: string) - 1 _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src_4 - Local Work: - Map Reduce Local Work - - Stage: Stage-0 - Move Operator - tables: - replace: true - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src_4 - - Stage: Stage-7 - Stats-Aggr Operator - - Stage: Stage-16 - Map Reduce Local Work - Alias -> Map Local Tables: sq_2:s1 Fetch Operator limit: -1 @@ -769,6 +705,70 @@ STAGE PLANS: Stage: Stage-5 Stats-Aggr Operator + Stage: Stage-16 + Map Reduce Local Work + Alias -> Map Local Tables: + sq_1:a + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + sq_1:a + TableScan + alias: a + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key > '9') and value is not null) (type: boolean) + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 key (type: string), value (type: string) + 1 _col0 (type: string), _col1 (type: string) + + Stage: Stage-12 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: string), value (type: string) + 1 _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.src_4 + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.src_4 + + Stage: Stage-7 + Stats-Aggr Operator + Stage: Stage-2 Map Reduce Map Operator Tree: @@ -852,12 +852,12 @@ RUN: Stage-17:MAPREDLOCAL RUN: Stage-13:MAPRED RUN: Stage-15:MAPREDLOCAL RUN: Stage-16:MAPREDLOCAL -RUN: Stage-12:MAPRED RUN: Stage-4:MAPRED -RUN: Stage-0:MOVE +RUN: Stage-12:MAPRED RUN: Stage-1:MOVE -RUN: Stage-7:STATS +RUN: Stage-0:MOVE RUN: Stage-5:STATS +RUN: Stage-7:STATS PREHOOK: query: select * from src_4 PREHOOK: type: QUERY PREHOOK: Input: default@src_4 diff --git a/ql/src/test/results/clientpositive/subquery_notin.q.out b/ql/src/test/results/clientpositive/subquery_notin.q.out index f00ae1c..ec3c153 100644 --- a/ql/src/test/results/clientpositive/subquery_notin.q.out +++ b/ql/src/test/results/clientpositive/subquery_notin.q.out @@ -649,7 +649,7 @@ STAGE PLANS: 
aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -663,29 +663,29 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Reduce Operator Tree: Group By Operator aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is null (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: complete outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col0 = 0) (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -710,7 +710,7 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -719,7 +719,7 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 26 Data size: 3381 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 5149 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -735,14 +735,14 @@ STAGE PLANS: key expressions: UDFToDouble(_col1) (type: double) sort order: + Map-reduce partition columns: UDFToDouble(_col1) (type: double) - Statistics: Num rows: 26 Data size: 3381 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 5149 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: int) TableScan Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -751,17 +751,17 @@ STAGE PLANS: 0 UDFToDouble(_col1) (type: double) 1 _col0 (type: double) outputColumnNames: _col0, _col1, _col3 - Statistics: Num rows: 28 Data size: 3719 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5663 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col3 is null 
(type: boolean) - Statistics: Num rows: 14 Data size: 1859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2831 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 1859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2831 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 1859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2831 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -816,7 +816,7 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -830,14 +830,14 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Reduce Operator Tree: Group By Operator aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: diff --git a/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out index 89987aa..9105bd3 100644 --- a/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out @@ -3177,10 +3177,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 7 Map Operator Tree: @@ -3195,10 +3195,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -3238,34 +3238,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 
Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Reduce Operator Tree: @@ -3273,34 +3273,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3375,10 +3375,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 7 Map Operator Tree: @@ -3393,10 +3393,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -3438,34 +3438,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Reduce Operator Tree: @@ -3473,34 +3473,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 
Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3578,10 +3578,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 5 Map Operator Tree: @@ -3612,10 +3612,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 11 Reduce Operator Tree: @@ -3623,49 +3623,49 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 
168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Reducer 2 Reduce Operator Tree: @@ -3714,49 +3714,49 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Union 3 
Vertex: Union 3 @@ -5472,10 +5472,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 6 Map Operator Tree: @@ -5490,10 +5490,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -5515,34 +5515,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 7 Reduce Operator Tree: @@ -5550,34 +5550,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 5 Vertex: Union 5 diff --git a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out index b8383fd..2e83ea3 100644 --- a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out +++ b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out @@ -300,18 +300,18 @@ Stage-0 Stage-1 Reducer 2 File Output Operator [FS_7] - Select Operator [SEL_5] (rows=10 width=88) + Select Operator [SEL_5] (rows=10 width=97) Output:["_col0","_col1","_col2"] - Group By Operator [GBY_4] (rows=10 width=91) + Group By Operator [GBY_4] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_2] (rows=10 width=91) + Group By Operator [GBY_2] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Select Operator [SEL_1] (rows=20 width=83) + Select Operator [SEL_1] (rows=20 width=88) Output:["key","c_int","c_float"] - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select x, y, count(*) from (select key, (c_int+c_float+1+2) as x, sum(c_int) as y from cbo_t1 group by c_float, cbo_t1.c_int, key) R group by y, x @@ -339,18 +339,18 @@ Stage-0 PartitionCols:_col0, _col1 Group By Operator [GBY_8] (rows=5 width=20) Output:["_col0","_col1","_col2"],aggregations:["count()"],keys:_col0, _col1 - Select Operator [SEL_5] (rows=10 width=91) + Select Operator [SEL_5] (rows=10 width=101) Output:["_col0","_col1"] - Group By Operator [GBY_4] (rows=10 width=91) + Group By Operator [GBY_4] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_2] (rows=10 width=91) + Group By Operator [GBY_2] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Select Operator [SEL_1] (rows=20 width=83) + Select Operator [SEL_1] (rows=20 width=88) Output:["key","c_int","c_float"] - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 
width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key order by a) cbo_t1 join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key order by q/10 desc, r asc) cbo_t2 on cbo_t1.a=p join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 0) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c order by cbo_t3.c_int+c desc, c @@ -397,11 +397,11 @@ Stage-0 <-Map 11 [SIMPLE_EDGE] SHUFFLE [RS_31] PartitionCols:_col0 - Select Operator [SEL_29] (rows=18 width=79) + Select Operator [SEL_29] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_50] (rows=18 width=79) + Filter Operator [FIL_50] (rows=18 width=84) predicate:key is not null - TableScan [TS_27] (rows=20 width=80) + TableScan [TS_27] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 4 [SIMPLE_EDGE] SHUFFLE [RS_30] @@ -428,9 +428,9 @@ Stage-0 PartitionCols:_col0, _col1, _col2 Group By Operator [GBY_14] (rows=2 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Filter Operator [FIL_49] (rows=5 width=74) + Filter Operator [FIL_49] (rows=5 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_11] (rows=20 width=83) + TableScan [TS_11] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_22] @@ -448,9 +448,9 @@ Stage-0 PartitionCols:_col0, _col1, _col2 Group By Operator [GBY_3] (rows=2 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Filter Operator [FIL_48] (rows=5 width=74) + Filter Operator [FIL_48] (rows=5 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key having cbo_t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by b % c asc, b desc) cbo_t1 left outer join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key having cbo_t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0) cbo_t2 on cbo_t1.a=p left outer join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 0) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c having cbo_t3.c_int > 0 and (c_int >=1 or c >= 1) and (c_int + c) >= 0 order by cbo_t3.c_int % c asc, cbo_t3.c_int desc @@ -494,11 +494,11 @@ Stage-0 <-Map 10 [SIMPLE_EDGE] SHUFFLE [RS_28] PartitionCols:_col0 - Select Operator [SEL_26] (rows=18 width=79) + Select Operator [SEL_26] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_46] (rows=18 width=79) + Filter Operator [FIL_46] (rows=18 width=84) predicate:((c_int > 0) and key is 
not null) - TableScan [TS_24] (rows=20 width=80) + TableScan [TS_24] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 4 [SIMPLE_EDGE] SHUFFLE [RS_27] @@ -527,7 +527,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_44] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 9 [SIMPLE_EDGE] SHUFFLE [RS_20] @@ -543,7 +543,7 @@ Stage-0 Output:["_col0","_col1","_col2"],keys:key, c_int, c_float Filter Operator [FIL_45] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and key is not null) - TableScan [TS_11] (rows=20 width=83) + TableScan [TS_11] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key having cbo_t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by b+c, a desc) cbo_t1 right outer join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key having cbo_t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0) cbo_t2 on cbo_t1.a=p right outer join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 2) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c @@ -583,9 +583,9 @@ Stage-0 <-Map 8 [SIMPLE_EDGE] SHUFFLE [RS_23] PartitionCols:_col0 - Select Operator [SEL_20] (rows=20 width=80) + Select Operator [SEL_20] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_19] (rows=20 width=80) + TableScan [TS_19] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_21] @@ -605,7 +605,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_34] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0)) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 7 [SIMPLE_EDGE] SHUFFLE [RS_22] @@ -621,7 +621,7 @@ Stage-0 Output:["_col0","_col1","_col2"],keys:key, c_int, c_float Filter Operator [FIL_35] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0)) - TableScan [TS_11] (rows=20 width=83) + TableScan [TS_11] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 
0) group by c_float, cbo_t1.c_int, key having cbo_t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by c+a desc) cbo_t1 full outer join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key having cbo_t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by p+q desc, r asc) cbo_t2 on cbo_t1.a=p full outer join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 0) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c having cbo_t3.c_int > 0 and (c_int >=1 or c >= 1) and (c_int + c) >= 0 order by cbo_t3.c_int @@ -665,11 +665,11 @@ Stage-0 <-Map 10 [SIMPLE_EDGE] SHUFFLE [RS_27] PartitionCols:_col0 - Select Operator [SEL_24] (rows=20 width=80) + Select Operator [SEL_24] (rows=20 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_41] (rows=20 width=80) + Filter Operator [FIL_41] (rows=20 width=84) predicate:(c_int > 0) - TableScan [TS_22] (rows=20 width=80) + TableScan [TS_22] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_25] @@ -689,7 +689,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_39] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0)) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 9 [SIMPLE_EDGE] SHUFFLE [RS_26] @@ -709,7 +709,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_40] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0)) - TableScan [TS_11] (rows=20 width=83) + TableScan [TS_11] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key having cbo_t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0) cbo_t1 join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key having cbo_t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0) cbo_t2 on cbo_t1.a=p join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 0) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c @@ -749,11 +749,11 @@ Stage-0 <-Map 8 [SIMPLE_EDGE] SHUFFLE [RS_25] PartitionCols:_col0 - Select Operator [SEL_23] (rows=18 width=79) + Select Operator [SEL_23] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_41] (rows=18 width=79) + Filter Operator [FIL_41] (rows=18 width=84) predicate:key is not null - TableScan [TS_21] (rows=20 width=80) + TableScan [TS_21] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_24] @@ -778,7 +778,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, 
c_float Filter Operator [FIL_39] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 7 [SIMPLE_EDGE] SHUFFLE [RS_17] @@ -794,7 +794,7 @@ Stage-0 Output:["_col0","_col1","_col2"],keys:key, c_int, c_float Filter Operator [FIL_40] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and key is not null) - TableScan [TS_8] (rows=20 width=83) + TableScan [TS_8] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select unionsrc.key FROM (select 'tst1' as key, count(1) as value from src) unionsrc @@ -864,9 +864,9 @@ Stage-0 SHUFFLE [RS_3] Group By Operator [GBY_2] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_1] (rows=20 width=76) + Select Operator [SEL_1] (rows=20 width=80) Output:["key"] - TableScan [TS_0] (rows=20 width=76) + TableScan [TS_0] (rows=20 width=80) default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Reducer 6 [CONTAINS] Reduce Output Operator [RS_24] @@ -878,9 +878,9 @@ Stage-0 SHUFFLE [RS_10] Group By Operator [GBY_9] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_8] (rows=20 width=76) + Select Operator [SEL_8] (rows=20 width=80) Output:["key"] - TableScan [TS_7] (rows=20 width=76) + TableScan [TS_7] (rows=20 width=80) default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Reducer 8 [CONTAINS] Reduce Output Operator [RS_24] @@ -892,9 +892,9 @@ Stage-0 SHUFFLE [RS_19] Group By Operator [GBY_18] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_17] (rows=20 width=76) + Select Operator [SEL_17] (rows=20 width=80) Output:["key"] - TableScan [TS_16] (rows=20 width=76) + TableScan [TS_16] (rows=20 width=80) default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select unionsrc.key, count(1) FROM (select 'max' as key, max(c_int) as value from cbo_t3 s1 @@ -944,9 +944,9 @@ Stage-0 SHUFFLE [RS_3] Group By Operator [GBY_2] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_1] (rows=20 width=76) + Select Operator [SEL_1] (rows=20 width=80) Output:["key"] - TableScan [TS_0] (rows=20 width=76) + TableScan [TS_0] (rows=20 width=80) default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Reducer 7 [CONTAINS] Reduce Output Operator [RS_26] @@ -961,9 +961,9 @@ Stage-0 SHUFFLE [RS_10] Group By Operator [GBY_9] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_8] (rows=20 width=76) + Select Operator [SEL_8] (rows=20 width=80) Output:["key"] - TableScan [TS_7] (rows=20 width=76) + TableScan [TS_7] (rows=20 width=80) default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Reducer 9 [CONTAINS] Reduce Output Operator [RS_26] @@ -978,9 +978,9 @@ Stage-0 SHUFFLE [RS_19] Group By Operator [GBY_18] (rows=1 width=8) Output:["_col0"],aggregations:["count(key)"] - Select Operator [SEL_17] (rows=20 width=76) + Select Operator [SEL_17] (rows=20 width=80) Output:["key"] - TableScan [TS_16] (rows=20 width=76) + TableScan [TS_16] (rows=20 width=80) 
default@cbo_t3,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select cbo_t1.key from cbo_t1 join cbo_t3 where cbo_t1.key=cbo_t3.key and cbo_t1.key >= 1 @@ -1003,20 +1003,20 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_6] PartitionCols:_col0 - Select Operator [SEL_2] (rows=6 width=70) + Select Operator [SEL_2] (rows=6 width=85) Output:["_col0"] - Filter Operator [FIL_13] (rows=6 width=70) + Filter Operator [FIL_13] (rows=6 width=85) predicate:(UDFToDouble(key) >= 1.0) - TableScan [TS_0] (rows=20 width=76) + TableScan [TS_0] (rows=20 width=80) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_7] PartitionCols:_col0 - Select Operator [SEL_5] (rows=6 width=70) + Select Operator [SEL_5] (rows=6 width=85) Output:["_col0"] - Filter Operator [FIL_14] (rows=6 width=70) + Filter Operator [FIL_14] (rows=6 width=85) predicate:(UDFToDouble(key) >= 1.0) - TableScan [TS_3] (rows=20 width=76) + TableScan [TS_3] (rows=20 width=80) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select cbo_t1.c_int, cbo_t2.c_int from cbo_t1 left outer join cbo_t2 on cbo_t1.key=cbo_t2.key @@ -1041,16 +1041,16 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_4] PartitionCols:_col0 - Select Operator [SEL_1] (rows=20 width=80) + Select Operator [SEL_1] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_0] (rows=20 width=80) + TableScan [TS_0] (rows=20 width=84) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_5] PartitionCols:_col0 - Select Operator [SEL_3] (rows=20 width=80) + Select Operator [SEL_3] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_2] (rows=20 width=80) + TableScan [TS_2] (rows=20 width=84) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select cbo_t1.c_int, cbo_t2.c_int from cbo_t1 full outer join cbo_t2 on cbo_t1.key=cbo_t2.key @@ -1075,16 +1075,16 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_4] PartitionCols:_col0 - Select Operator [SEL_1] (rows=20 width=80) + Select Operator [SEL_1] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_0] (rows=20 width=80) + TableScan [TS_0] (rows=20 width=84) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_5] PartitionCols:_col0 - Select Operator [SEL_3] (rows=20 width=80) + Select Operator [SEL_3] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_2] (rows=20 width=80) + TableScan [TS_2] (rows=20 width=84) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select b, cbo_t1.c, cbo_t2.p, q, cbo_t3.c_int from (select key as a, c_int as b, cbo_t1.c_float as c from cbo_t1) cbo_t1 join (select cbo_t2.key as p, cbo_t2.c_int as q, c_float as r from cbo_t2) cbo_t2 on cbo_t1.a=p join cbo_t3 on cbo_t1.a=key @@ -1109,29 +1109,29 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_9] PartitionCols:_col0 - Select Operator [SEL_2] (rows=18 width=82) + Select Operator [SEL_2] (rows=18 width=87) Output:["_col0","_col1","_col2"] - Filter Operator [FIL_21] (rows=18 width=82) + Filter Operator [FIL_21] (rows=18 width=87) predicate:key is not null - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_10] PartitionCols:_col0 - Select Operator [SEL_5] (rows=18 width=79) + Select Operator [SEL_5] (rows=18 width=84) 
Output:["_col0","_col1"] - Filter Operator [FIL_22] (rows=18 width=79) + Filter Operator [FIL_22] (rows=18 width=84) predicate:key is not null - TableScan [TS_3] (rows=20 width=80) + TableScan [TS_3] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_11] PartitionCols:_col0 - Select Operator [SEL_8] (rows=18 width=79) + Select Operator [SEL_8] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_23] (rows=18 width=79) + Filter Operator [FIL_23] (rows=18 width=84) predicate:key is not null - TableScan [TS_6] (rows=20 width=80) + TableScan [TS_6] (rows=20 width=84) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select key, cbo_t1.c_int, cbo_t2.p, q from cbo_t1 join (select cbo_t2.key as p, cbo_t2.c_int as q, c_float as r from cbo_t2) cbo_t2 on cbo_t1.key=p join (select key as a, c_int as b, cbo_t3.c_float as c from cbo_t3)cbo_t3 on cbo_t1.key=a @@ -1156,29 +1156,29 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_9] PartitionCols:_col0 - Select Operator [SEL_2] (rows=18 width=79) + Select Operator [SEL_2] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_21] (rows=18 width=79) + Filter Operator [FIL_21] (rows=18 width=84) predicate:key is not null - TableScan [TS_0] (rows=20 width=80) + TableScan [TS_0] (rows=20 width=84) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_10] PartitionCols:_col0 - Select Operator [SEL_5] (rows=18 width=75) + Select Operator [SEL_5] (rows=18 width=80) Output:["_col0"] - Filter Operator [FIL_22] (rows=18 width=75) + Filter Operator [FIL_22] (rows=18 width=80) predicate:key is not null - TableScan [TS_3] (rows=20 width=76) + TableScan [TS_3] (rows=20 width=80) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_11] PartitionCols:_col0 - Select Operator [SEL_8] (rows=18 width=79) + Select Operator [SEL_8] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_23] (rows=18 width=79) + Filter Operator [FIL_23] (rows=18 width=84) predicate:key is not null - TableScan [TS_6] (rows=20 width=80) + TableScan [TS_6] (rows=20 width=84) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select * from (select q, b, cbo_t2.p, cbo_t1.c, cbo_t3.c_int from (select key as a, c_int as b, cbo_t1.c_float as c from cbo_t1 where (cbo_t1.c_int + 1 == 2) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0)) cbo_t1 full outer join (select cbo_t2.key as p, cbo_t2.c_int as q, c_float as r from cbo_t2 where (cbo_t2.c_int + 1 == 2) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0)) cbo_t2 on cbo_t1.a=p join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q == 2) and (b > 0 or c_int >= 0)) R where (q + 1 = 2) and (R.b > 0 or c_int >= 0) @@ -1206,11 +1206,11 @@ Stage-0 <-Map 5 [SIMPLE_EDGE] SHUFFLE [RS_15] PartitionCols:_col0 - Select Operator [SEL_13] (rows=18 width=79) + Select Operator [SEL_13] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_26] (rows=18 width=79) + Filter Operator [FIL_26] (rows=18 width=84) predicate:key is not null - TableScan [TS_11] (rows=20 width=80) + TableScan [TS_11] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_14] @@ -1222,20 +1222,20 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_6] PartitionCols:_col0 - Select Operator [SEL_2] (rows=9 width=82) + Select Operator [SEL_2] (rows=9 
width=93) Output:["_col0","_col1","_col2"] - Filter Operator [FIL_24] (rows=9 width=82) + Filter Operator [FIL_24] (rows=9 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_7] PartitionCols:_col0 - Select Operator [SEL_5] (rows=9 width=79) + Select Operator [SEL_5] (rows=9 width=89) Output:["_col0","_col1"] - Filter Operator [FIL_25] (rows=9 width=82) + Filter Operator [FIL_25] (rows=9 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_3] (rows=20 width=83) + TableScan [TS_3] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select * from (select q, b, cbo_t2.p, cbo_t1.c, cbo_t3.c_int from (select key as a, c_int as b, cbo_t1.c_float as c from cbo_t1 where (cbo_t1.c_int + 1 == 2) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0)) cbo_t1 right outer join (select cbo_t2.key as p, cbo_t2.c_int as q, c_float as r from cbo_t2 where (cbo_t2.c_int + 1 == 2) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0)) cbo_t2 on cbo_t1.a=p right outer join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q == 2) and (b > 0 or c_int >= 0)) R where (q + 1 = 2) and (R.b > 0 or c_int >= 0) @@ -1262,27 +1262,27 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_8] PartitionCols:_col0 - Select Operator [SEL_2] (rows=10 width=83) + Select Operator [SEL_2] (rows=10 width=93) Output:["_col0","_col1","_col2"] - Filter Operator [FIL_17] (rows=10 width=83) + Filter Operator [FIL_17] (rows=10 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0))) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_9] PartitionCols:_col0 - Select Operator [SEL_5] (rows=10 width=80) + Select Operator [SEL_5] (rows=10 width=89) Output:["_col0","_col1"] - Filter Operator [FIL_18] (rows=10 width=83) + Filter Operator [FIL_18] (rows=10 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0))) - TableScan [TS_3] (rows=20 width=83) + TableScan [TS_3] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_10] PartitionCols:_col0 - Select Operator [SEL_7] (rows=20 width=80) + Select Operator [SEL_7] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_6] (rows=20 width=80) + TableScan [TS_6] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select key, (c_int+1)+2 as x, sum(c_int) from cbo_t1 group by c_float, cbo_t1.c_int, key order by x limit 1 @@ -1303,22 +1303,22 @@ Stage-0 File Output Operator [FS_10] Limit [LIM_9] (rows=1 width=97) Number of rows:1 - Select Operator [SEL_8] (rows=10 width=88) + Select Operator [SEL_8] (rows=10 width=97) Output:["_col0","_col1","_col2"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_7] - Select Operator [SEL_5] (rows=10 width=88) + Select Operator [SEL_5] (rows=10 width=97) Output:["_col0","_col1","_col2"] - Group By Operator [GBY_4] (rows=10 width=91) + Group By Operator [GBY_4] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] 
PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_2] (rows=10 width=91) + Group By Operator [GBY_2] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Select Operator [SEL_1] (rows=20 width=83) + Select Operator [SEL_1] (rows=20 width=88) Output:["key","c_int","c_float"] - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select x, y, count(*) from (select key, (c_int+c_float+1+2) as x, sum(c_int) as y from cbo_t1 group by c_float, cbo_t1.c_int, key) R group by y, x order by x,y limit 1 @@ -1351,18 +1351,18 @@ Stage-0 PartitionCols:_col0, _col1 Group By Operator [GBY_8] (rows=5 width=20) Output:["_col0","_col1","_col2"],aggregations:["count()"],keys:_col1, _col0 - Select Operator [SEL_5] (rows=10 width=91) + Select Operator [SEL_5] (rows=10 width=101) Output:["_col0","_col1"] - Group By Operator [GBY_4] (rows=10 width=91) + Group By Operator [GBY_4] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_2] (rows=10 width=91) + Group By Operator [GBY_2] (rows=10 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Select Operator [SEL_1] (rows=20 width=83) + Select Operator [SEL_1] (rows=20 width=88) Output:["key","c_int","c_float"] - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select key from(select key from (select key from cbo_t1 limit 5)cbo_t2 limit 5)cbo_t3 limit 5 @@ -1381,27 +1381,27 @@ Stage-0 Stage-1 Reducer 3 File Output Operator [FS_13] - Limit [LIM_12] (rows=5 width=68) + Limit [LIM_12] (rows=5 width=85) Number of rows:5 - Limit [LIM_10] (rows=5 width=68) + Limit [LIM_10] (rows=5 width=85) Number of rows:5 - Select Operator [SEL_9] (rows=5 width=68) + Select Operator [SEL_9] (rows=5 width=85) Output:["_col0"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_8] - Limit [LIM_7] (rows=5 width=68) + Limit [LIM_7] (rows=5 width=85) Number of rows:5 - Limit [LIM_5] (rows=5 width=68) + Limit [LIM_5] (rows=5 width=85) Number of rows:5 - Select Operator [SEL_4] (rows=5 width=68) + Select Operator [SEL_4] (rows=5 width=85) Output:["_col0"] <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] - Limit [LIM_2] (rows=5 width=68) + Limit [LIM_2] (rows=5 width=85) Number of rows:5 - Select Operator [SEL_1] (rows=20 width=76) + Select Operator [SEL_1] (rows=20 width=80) Output:["_col0"] - TableScan [TS_0] (rows=20 width=76) + TableScan [TS_0] (rows=20 width=80) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select key, c_int from(select key, c_int from (select key, c_int from cbo_t1 order by c_int limit 5)cbo_t1 order by c_int limit 5)cbo_t2 order by c_int limit 5 @@ -1421,27 +1421,27 @@ Stage-0 Stage-1 Reducer 4 File Output Operator [FS_13] - Limit [LIM_12] (rows=5 width=71) + Limit [LIM_12] (rows=5 width=89) Number of rows:5 - Select Operator [SEL_11] (rows=5 width=71) + Select Operator [SEL_11] (rows=5 width=89) Output:["_col0","_col1"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_10] - Limit [LIM_8] (rows=5 width=71) + Limit [LIM_8] (rows=5 width=89) Number of rows:5 - Select Operator [SEL_7] (rows=5 width=71) + Select Operator 
[SEL_7] (rows=5 width=89) Output:["_col0","_col1"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_6] - Limit [LIM_4] (rows=5 width=71) + Limit [LIM_4] (rows=5 width=89) Number of rows:5 - Select Operator [SEL_3] (rows=20 width=80) + Select Operator [SEL_3] (rows=20 width=84) Output:["_col0","_col1"] <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_2] - Select Operator [SEL_1] (rows=20 width=80) + Select Operator [SEL_1] (rows=20 width=84) Output:["_col0","_col1"] - TableScan [TS_0] (rows=20 width=80) + TableScan [TS_0] (rows=20 width=84) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] PREHOOK: query: explain select cbo_t3.c_int, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key order by a limit 5) cbo_t1 join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key order by q/10 desc, r asc limit 5) cbo_t2 on cbo_t1.a=p join cbo_t3 on cbo_t1.a=key where (b + cbo_t2.q >= 0) and (b > 0 or c_int >= 0) group by cbo_t3.c_int, c order by cbo_t3.c_int+c desc, c limit 5 @@ -1490,11 +1490,11 @@ Stage-0 <-Map 11 [SIMPLE_EDGE] SHUFFLE [RS_37] PartitionCols:_col0 - Select Operator [SEL_35] (rows=18 width=79) + Select Operator [SEL_35] (rows=18 width=84) Output:["_col0","_col1"] - Filter Operator [FIL_59] (rows=18 width=79) + Filter Operator [FIL_59] (rows=18 width=84) predicate:key is not null - TableScan [TS_33] (rows=20 width=80) + TableScan [TS_33] (rows=20 width=84) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int"] <-Reducer 4 [SIMPLE_EDGE] SHUFFLE [RS_36] @@ -1508,50 +1508,50 @@ Stage-0 <-Reducer 10 [SIMPLE_EDGE] SHUFFLE [RS_29] PartitionCols:_col0 - Filter Operator [FIL_26] (rows=2 width=62) + Filter Operator [FIL_26] (rows=2 width=105) predicate:_col0 is not null - Limit [LIM_24] (rows=3 width=76) + Limit [LIM_24] (rows=3 width=105) Number of rows:5 - Select Operator [SEL_23] (rows=3 width=76) + Select Operator [SEL_23] (rows=3 width=105) Output:["_col0","_col1"] <-Reducer 9 [SIMPLE_EDGE] SHUFFLE [RS_22] - Select Operator [SEL_20] (rows=3 width=76) + Select Operator [SEL_20] (rows=3 width=105) Output:["_col0","_col1","_col2","_col3"] - Group By Operator [GBY_19] (rows=3 width=70) + Group By Operator [GBY_19] (rows=3 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 8 [SIMPLE_EDGE] SHUFFLE [RS_18] PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_17] (rows=3 width=70) + Group By Operator [GBY_17] (rows=3 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Filter Operator [FIL_58] (rows=6 width=77) + Filter Operator [FIL_58] (rows=6 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0))) - TableScan [TS_14] (rows=20 width=83) + TableScan [TS_14] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_28] PartitionCols:_col0 - Filter Operator [FIL_12] (rows=2 width=54) + Filter Operator [FIL_12] (rows=2 width=97) predicate:_col0 is not null - Limit [LIM_10] (rows=3 width=68) + Limit [LIM_10] (rows=3 width=97) Number of rows:5 - Select Operator [SEL_9] (rows=3 width=68) + Select Operator [SEL_9] (rows=3 width=97) Output:["_col0","_col1","_col2"] <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_8] - 
Select Operator [SEL_6] (rows=3 width=68) + Select Operator [SEL_6] (rows=3 width=97) Output:["_col0","_col1","_col2"] - Group By Operator [GBY_5] (rows=3 width=70) + Group By Operator [GBY_5] (rows=3 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_4] PartitionCols:_col0, _col1, _col2 - Group By Operator [GBY_3] (rows=3 width=70) + Group By Operator [GBY_3] (rows=3 width=101) Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float - Filter Operator [FIL_56] (rows=6 width=77) + Filter Operator [FIL_56] (rows=6 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0))) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t1.c_int from cbo_t1 left semi join cbo_t2 on cbo_t1.key=cbo_t2.key where (cbo_t1.c_int + 1 == 2) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) @@ -1576,22 +1576,22 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_8] PartitionCols:_col0 - Select Operator [SEL_2] (rows=9 width=82) + Select Operator [SEL_2] (rows=9 width=93) Output:["_col0","_col1"] - Filter Operator [FIL_15] (rows=9 width=82) + Filter Operator [FIL_15] (rows=9 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_9] PartitionCols:_col0 - Group By Operator [GBY_7] (rows=5 width=68) + Group By Operator [GBY_7] (rows=5 width=85) Output:["_col0"],keys:_col0 - Select Operator [SEL_5] (rows=18 width=75) + Select Operator [SEL_5] (rows=18 width=80) Output:["_col0"] - Filter Operator [FIL_16] (rows=18 width=75) + Filter Operator [FIL_16] (rows=18 width=80) predicate:key is not null - TableScan [TS_3] (rows=20 width=76) + TableScan [TS_3] (rows=20 width=80) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select * from (select c, b, a from (select key as a, c_int as b, cbo_t1.c_float as c from cbo_t1 where (cbo_t1.c_int + 1 == 2) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0)) cbo_t1 left semi join (select cbo_t2.key as p, cbo_t2.c_int as q, c_float as r from cbo_t2 where (cbo_t2.c_int + 1 == 2) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0)) cbo_t2 on cbo_t1.a=p left semi join cbo_t3 on cbo_t1.a=key where (b + 1 == 2) and (b > 0 or c >= 0)) R where (b + 1 = 2) and (R.b > 0 or c >= 0) @@ -1616,33 +1616,33 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_13] PartitionCols:_col0 - Select Operator [SEL_2] (rows=9 width=82) + Select Operator [SEL_2] (rows=9 width=93) Output:["_col0","_col1","_col2"] - Filter Operator [FIL_25] (rows=9 width=82) + Filter Operator [FIL_25] (rows=9 width=93) predicate:(((c_int + 1) = 2) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 3 [SIMPLE_EDGE] SHUFFLE [RS_14] PartitionCols:_col0 Group By Operator [GBY_10] (rows=3 width=85) Output:["_col0"],keys:_col0 - Select Operator [SEL_5] (rows=9 width=75) + Select Operator [SEL_5] (rows=9 width=85) Output:["_col0"] - Filter Operator [FIL_26] (rows=9 width=82) + Filter Operator [FIL_26] (rows=9 width=93) predicate:(((c_int + 
1) = 2) and ((c_int > 0) or (c_float >= 0)) and key is not null) - TableScan [TS_3] (rows=20 width=83) + TableScan [TS_3] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Map 4 [SIMPLE_EDGE] SHUFFLE [RS_15] PartitionCols:_col0 - Group By Operator [GBY_12] (rows=6 width=70) + Group By Operator [GBY_12] (rows=6 width=85) Output:["_col0"],keys:_col0 - Select Operator [SEL_8] (rows=18 width=75) + Select Operator [SEL_8] (rows=18 width=80) Output:["_col0"] - Filter Operator [FIL_27] (rows=18 width=75) + Filter Operator [FIL_27] (rows=18 width=80) predicate:key is not null - TableScan [TS_6] (rows=20 width=76) + TableScan [TS_6] (rows=20 width=80) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] PREHOOK: query: explain select a, c, count(*) from (select key as a, c_int+1 as b, sum(c_int) as c from cbo_t1 where (cbo_t1.c_int + 1 >= 0) and (cbo_t1.c_int > 0 or cbo_t1.c_float >= 0) group by c_float, cbo_t1.c_int, key having cbo_t1.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by a+b desc, c asc) cbo_t1 left semi join (select key as p, c_int+1 as q, sum(c_int) as r from cbo_t2 where (cbo_t2.c_int + 1 >= 0) and (cbo_t2.c_int > 0 or cbo_t2.c_float >= 0) group by c_float, cbo_t2.c_int, key having cbo_t2.c_float > 0 and (c_int >=1 or c_float >= 1) and (c_int + c_float) >= 0 order by q+r/10 desc, p) cbo_t2 on cbo_t1.a=p left semi join cbo_t3 on cbo_t1.a=key where (b + 1 >= 0) and (b > 0 or a >= 0) group by a, c having a > 0 and (a >=1 or c >= 1) and (a + c) >= 0 order by c, a @@ -1684,13 +1684,13 @@ Stage-0 <-Map 10 [SIMPLE_EDGE] SHUFFLE [RS_32] PartitionCols:_col0 - Group By Operator [GBY_29] (rows=3 width=56) + Group By Operator [GBY_29] (rows=3 width=85) Output:["_col0"],keys:_col0 - Select Operator [SEL_25] (rows=6 width=70) + Select Operator [SEL_25] (rows=6 width=85) Output:["_col0"] - Filter Operator [FIL_50] (rows=6 width=70) + Filter Operator [FIL_50] (rows=6 width=85) predicate:(UDFToDouble(key) > 0.0) - TableScan [TS_23] (rows=20 width=76) + TableScan [TS_23] (rows=20 width=80) default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"] <-Reducer 3 [SIMPLE_EDGE] SHUFFLE [RS_30] @@ -1714,7 +1714,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_48] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and (((c_int + 1) + 1) >= 0) and (((c_int + 1) > 0) or (UDFToDouble(key) >= 0.0)) and (UDFToDouble(key) > 0.0)) - TableScan [TS_0] (rows=20 width=83) + TableScan [TS_0] (rows=20 width=88) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] <-Reducer 9 [SIMPLE_EDGE] SHUFFLE [RS_31] @@ -1736,7 +1736,7 @@ Stage-0 Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, c_int, c_float Filter Operator [FIL_49] (rows=1 width=93) predicate:(((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and ((UDFToFloat(c_int) + c_float) >= 0.0) and (UDFToDouble(key) > 0.0)) - TableScan [TS_12] (rows=20 width=83) + TableScan [TS_12] (rows=20 width=88) default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"] PREHOOK: query: explain select cbo_t1.key as x, c_int as c_int, (((c_int+c_float)*10)+5) as y from cbo_t1 @@ -2280,7 +2280,7 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_4] 
PartitionCols:_col0, _col1 - Group By Operator [GBY_3] (rows=13 width=219) + Group By Operator [GBY_3] (rows=13 width=295) Output:["_col0","_col1","_col2"],aggregations:["avg(p_size)"],keys:p_name, p_mfgr Filter Operator [FIL_24] (rows=26 width=223) predicate:p_name is not null @@ -2518,7 +2518,7 @@ Stage-0 Output:["_col0"],aggregations:["avg(VALUE._col0)"] <-Map 5 [SIMPLE_EDGE] SHUFFLE [RS_6] - Group By Operator [GBY_5] (rows=1 width=0) + Group By Operator [GBY_5] (rows=1 width=76) Output:["_col0"],aggregations:["avg(p_size)"] Filter Operator [FIL_37] (rows=8 width=4) predicate:(p_size < 10) @@ -2531,7 +2531,7 @@ Stage-0 Output:["_col0"],aggregations:["avg(VALUE._col0)"] <-Map 7 [SIMPLE_EDGE] SHUFFLE [RS_22] - Group By Operator [GBY_21] (rows=1 width=0) + Group By Operator [GBY_21] (rows=1 width=76) Output:["_col0"],aggregations:["avg(p_size)"] Filter Operator [FIL_39] (rows=8 width=4) predicate:(p_size < 10) @@ -2716,14 +2716,14 @@ Stage-0 File Output Operator [FS_6] Select Operator [SEL_4] (rows=20 width=64) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10"] - PTF Operator [PTF_3] (rows=20 width=612) + PTF Operator [PTF_3] (rows=20 width=621) Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] - Select Operator [SEL_2] (rows=20 width=612) + Select Operator [SEL_2] (rows=20 width=621) Output:["_col0","_col1","_col2","_col3"] <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_1] PartitionCols:key - TableScan [TS_0] (rows=20 width=160) + TableScan [TS_0] (rows=20 width=169) default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["c_float","c_int","key","value"] PREHOOK: query: explain select *, rank() over(partition by key order by value) as rr from src1 diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out index 345d5f7..ff62691 100644 --- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out +++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out @@ -197,11 +197,11 @@ Stage-2 Stage-0 Reducer 2 File Output Operator [FS_6] - Group By Operator [GBY_4] (rows=1 width=0) + Group By Operator [GBY_4] (rows=1 width=984) Output:["_col0","_col1"],aggregations:["compute_stats(VALUE._col0)","compute_stats(VALUE._col1)"] <-Map 1 [SIMPLE_EDGE] SHUFFLE [RS_3] - Group By Operator [GBY_2] (rows=1 width=0) + Group By Operator [GBY_2] (rows=1 width=984) Output:["_col0","_col1"],aggregations:["compute_stats(key, 16)","compute_stats(value, 16)"] Select Operator [SEL_1] (rows=500 width=10) Output:["key","value"] diff --git a/ql/src/test/results/clientpositive/tez/groupby3.q.out b/ql/src/test/results/clientpositive/tez/groupby3.q.out index c46d1df..771a0ed 100644 --- a/ql/src/test/results/clientpositive/tez/groupby3.q.out +++ b/ql/src/test/results/clientpositive/tez/groupby3.q.out @@ -67,10 +67,10 @@ STAGE PLANS: aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0) mode: partial1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: string), _col4 (type: string), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct) Reducer 3 Reduce Operator Tree: @@ -78,14 +78,14 @@ STAGE PLANS: aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8) mode: final outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 1208 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out index 4075b81..a399cce 100644 --- a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out +++ b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out @@ -36,11 +36,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -51,13 +51,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -127,11 +127,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -191,13 +191,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -659,11 +659,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -823,33 +823,33 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator isSamplingPred: false predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: 1 auto parallelism: true Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: a2 Partition key expr: ds - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Stage: Stage-0 @@ -1405,11 +1405,11 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator null sort order: sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col0 (type: string) auto parallelism: false @@ -1515,13 
+1515,13 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat diff --git a/ql/src/test/results/clientpositive/tez/subquery_in.q.out b/ql/src/test/results/clientpositive/tez/subquery_in.q.out index 627adfe..4351338 100644 --- a/ql/src/test/results/clientpositive/tez/subquery_in.q.out +++ b/ql/src/test/results/clientpositive/tez/subquery_in.q.out @@ -357,10 +357,10 @@ STAGE PLANS: aggregations: avg(_col0) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Reducer 5 Reduce Operator Tree: @@ -368,20 +368,20 @@ STAGE PLANS: aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 Fetch Operator diff --git a/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out index 833d47e..aadf4a6 100644 --- a/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out @@ -132,10 +132,10 @@ STAGE PLANS: aggregations: min(dc), max(dc), sum(dc), avg(dc) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: struct) Execution mode: vectorized Reducer 2 @@ -144,10 +144,10 @@ STAGE PLANS: aggregations: 
min(VALUE._col0), max(VALUE._col1), sum(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_aggregate_without_gby.q.out b/ql/src/test/results/clientpositive/tez/vector_aggregate_without_gby.q.out index 3bbbb46..dfac90b 100644 --- a/ql/src/test/results/clientpositive/tez/vector_aggregate_without_gby.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_aggregate_without_gby.q.out @@ -48,11 +48,11 @@ Stage-0 Stage-1 Reducer 2 vectorized File Output Operator [FS_14] - Group By Operator [GBY_13] (rows=1 width=88) + Group By Operator [GBY_13] (rows=1 width=188) Output:["_col0","_col1"],aggregations:["max(VALUE._col0)","max(VALUE._col1)"] <-Map 1 [SIMPLE_EDGE] vectorized SHUFFLE [RS_12] - Group By Operator [GBY_11] (rows=1 width=88) + Group By Operator [GBY_11] (rows=1 width=188) Output:["_col0","_col1"],aggregations:["max(dt)","max(greg_dt)"] Select Operator [SEL_10] (rows=3 width=102) Output:["dt","greg_dt"] diff --git a/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out b/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out index e65245e..2e48efa 100644 --- a/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out @@ -30,18 +30,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1045942 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: cdouble is null (type: boolean) - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 265164 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: cstring1 (type: string), cint (type: int), cfloat (type: float), csmallint (type: smallint), COALESCE(null,cstring1,cint,cfloat,csmallint) (type: string) outputColumnNames: _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 819540 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: string), _col2 (type: int), _col3 (type: float), _col4 (type: smallint), _col5 (type: string) sort order: +++++ - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 819540 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -50,13 +50,13 @@ STAGE PLANS: Select Operator expressions: null (type: double), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: float), KEY.reducesinkkey3 (type: smallint), KEY.reducesinkkey4 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 3114 Data size: 246572 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -122,18 +122,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 146792 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ctinyint is null (type: boolean) - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 37224 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: cdouble (type: double), cint (type: int), COALESCE(null,(cdouble + log2(cint)),0) (type: double) outputColumnNames: _col1, _col2, _col3 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 52844 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: double), _col2 (type: int), _col3 (type: double) sort order: +++ - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 52844 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -142,13 +142,13 @@ STAGE PLANS: Select Operator expressions: null (type: tinyint), KEY.reducesinkkey0 (type: double), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 27928 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -214,15 +214,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 110088 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (cfloat is null and cbigint is null) (type: boolean) - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 7092 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE 
Reduce Output Operator sort order: - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -231,13 +231,13 @@ STAGE PLANS: Select Operator expressions: null (type: float), null (type: bigint), 0.0 (type: float) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -303,18 +303,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 983040 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (ctimestamp1 is not null or ctimestamp2 is not null) (type: boolean) - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 983040 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), COALESCE(ctimestamp1,ctimestamp2) (type: timestamp) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: timestamp) sort order: +++ - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -323,13 +323,13 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: timestamp), KEY.reducesinkkey2 (type: timestamp) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -395,15 +395,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 110088 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (cfloat is null and cbigint is null) (type: boolean) - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 7092 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -412,13 +412,13 @@ STAGE PLANS: Select Operator expressions: null (type: float), null (type: bigint), null (type: float) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out index 8bb0934..cb93f63 100644 --- a/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out @@ -574,10 +574,10 @@ STAGE PLANS: aggregations: avg(dec), sum(dec) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) Execution mode: vectorized Reducer 2 @@ -586,10 +586,10 @@ STAGE PLANS: aggregations: avg(VALUE._col0), sum(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out b/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out index 07c45ba..a6d9fb9 100644 --- a/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out +++ 
b/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out @@ -2500,10 +2500,10 @@ STAGE PLANS: aggregations: histogram_numeric(_col0, 3) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reducer 2 Reduce Operator Tree: @@ -2511,10 +2511,10 @@ STAGE PLANS: aggregations: histogram_numeric(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out index 1d3a5f6..82df555 100644 --- a/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out @@ -510,10 +510,10 @@ STAGE PLANS: alias: interval_arithmetic_1 Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reducer 2 @@ -522,13 +522,13 @@ STAGE PLANS: Select Operator expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month) outputColumnNames: _col0, _col1 - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1037,13 +1037,13 @@ STAGE PLANS: Select Operator expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time) outputColumnNames: _col0, _col1 - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + 
Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_null_projection.q.out b/ql/src/test/results/clientpositive/tez/vector_null_projection.q.out index 07c3d60..20fd48b 100644 --- a/ql/src/test/results/clientpositive/tez/vector_null_projection.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_null_projection.q.out @@ -61,10 +61,10 @@ STAGE PLANS: Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -111,52 +111,52 @@ STAGE PLANS: alias: a Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: null (type: void) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: void) sort order: + Map-reduce partition columns: _col0 (type: void) - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Map 4 Map Operator Tree: TableScan alias: b Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: null (type: void) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: void) sort order: + Map-reduce partition columns: _col0 (type: void) - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: void) mode: mergepartial outputColumnNames: 
_col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorization_0.q.out b/ql/src/test/results/clientpositive/tez/vectorization_0.q.out index f95733a..0fd2a29 100644 --- a/ql/src/test/results/clientpositive/tez/vectorization_0.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorization_0.q.out @@ -245,10 +245,10 @@ STAGE PLANS: aggregations: avg(ctinyint), variance(ctinyint), var_pop(ctinyint), var_samp(ctinyint), std(ctinyint), stddev(ctinyint), stddev_pop(ctinyint), stddev_samp(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -257,11 +257,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -269,10 +269,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 
Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -554,10 +554,10 @@ STAGE PLANS: aggregations: avg(cbigint), variance(cbigint), var_pop(cbigint), var_samp(cbigint), std(cbigint), stddev(cbigint), stddev_pop(cbigint), stddev_samp(cbigint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -566,11 +566,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -578,10 +578,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -863,10 +863,10 @@ STAGE PLANS: aggregations: avg(cfloat), variance(cfloat), var_pop(cfloat), var_samp(cfloat), std(cfloat), stddev(cfloat), stddev_pop(cfloat), stddev_samp(cfloat) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: 
struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -875,11 +875,11 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reducer 3 Execution mode: vectorized @@ -887,10 +887,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1014,10 +1014,10 @@ STAGE PLANS: aggregations: avg(cbigint), stddev_pop(cbigint), var_samp(cbigint), count(), sum(cfloat), min(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: bigint), _col4 (type: double), _col5 (type: tinyint) Execution mode: vectorized Reducer 2 @@ -1026,14 +1026,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), stddev_pop(VALUE._col1), var_samp(VALUE._col2), count(VALUE._col3), sum(VALUE._col4), min(VALUE._col5) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (- _col0) (type: double), (-6432.0 + _col0) (type: double), _col1 (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) + (-6432.0 + _col0)) (type: double), _col2 (type: double), (- (-6432.0 + _col0)) (type: double), (-6432.0 + (- (-6432.0 + _col0))) (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) / (- (-6432.0 + 
_col0))) (type: double), _col3 (type: bigint), _col4 (type: double), (_col2 % _col1) (type: double), (- _col2) (type: double), ((- (-6432.0 + _col0)) * (- _col0)) (type: double), _col5 (type: tinyint), (- _col5) (type: tinyint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorization_pushdown.q.out b/ql/src/test/results/clientpositive/tez/vectorization_pushdown.q.out index 2f53a3c..725cc02 100644 --- a/ql/src/test/results/clientpositive/tez/vectorization_pushdown.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorization_pushdown.q.out @@ -32,10 +32,10 @@ STAGE PLANS: aggregations: avg(cbigint) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Execution mode: vectorized Reducer 2 @@ -44,10 +44,10 @@ STAGE PLANS: aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out index 007cd5f..5a6ebc1 100644 --- a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out @@ -162,10 +162,10 @@ STAGE PLANS: aggregations: avg(cint), sum(cdouble), stddev_pop(cint), stddev_samp(csmallint), var_samp(cint), avg(cfloat), stddev_samp(cint), min(ctinyint), count(csmallint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: tinyint), 
_col8 (type: bigint) Execution mode: vectorized Reducer 2 @@ -174,14 +174,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), sum(VALUE._col1), stddev_pop(VALUE._col2), stddev_samp(VALUE._col3), var_samp(VALUE._col4), avg(VALUE._col5), stddev_samp(VALUE._col6), min(VALUE._col7), count(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (_col0 + -3728.0) (type: double), (- (_col0 + -3728.0)) (type: double), (- (- (_col0 + -3728.0))) (type: double), ((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) (type: double), _col1 (type: double), (- _col0) (type: double), _col2 (type: double), (((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) * (- (- (_col0 + -3728.0)))) (type: double), _col3 (type: double), (- _col2) (type: double), (_col2 - (- (- (_col0 + -3728.0)))) (type: double), ((_col2 - (- (- (_col0 + -3728.0)))) * _col2) (type: double), _col4 (type: double), _col5 (type: double), (10.175 - _col4) (type: double), (- (10.175 - _col4)) (type: double), ((- _col2) / -563.0) (type: double), _col6 (type: double), (- ((- _col2) / -563.0)) (type: double), (_col0 / _col1) (type: double), _col7 (type: tinyint), _col8 (type: bigint), (UDFToDouble(_col7) / ((- _col2) / -563.0)) (type: double), (- (_col0 / _col1)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -374,10 +374,10 @@ STAGE PLANS: aggregations: max(cint), var_pop(cbigint), stddev_pop(csmallint), max(cdouble), avg(ctinyint), min(cint), min(cdouble), stddev_samp(csmallint), var_samp(cint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: struct), _col2 (type: struct), _col3 (type: double), _col4 (type: struct), _col5 (type: int), _col6 (type: double), _col7 (type: struct), _col8 (type: struct) Execution mode: vectorized Reducer 2 @@ -386,14 +386,14 @@ STAGE PLANS: aggregations: max(VALUE._col0), var_pop(VALUE._col1), stddev_pop(VALUE._col2), max(VALUE._col3), avg(VALUE._col4), min(VALUE._col5), min(VALUE._col6), stddev_samp(VALUE._col7), var_samp(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 
Data size: 420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), (UDFToDouble(_col0) / -3728.0) (type: double), (_col0 * -3728) (type: int), _col1 (type: double), (- (_col0 * -3728)) (type: int), _col2 (type: double), (-563 % (_col0 * -3728)) (type: int), (_col1 / _col2) (type: double), (- _col2) (type: double), _col3 (type: double), _col4 (type: double), (_col2 - 10.175) (type: double), _col5 (type: int), (UDFToDouble((_col0 * -3728)) % (_col2 - 10.175)) (type: double), (- _col3) (type: double), _col6 (type: double), (_col3 % -26.28) (type: double), _col7 (type: double), (- (UDFToDouble(_col0) / -3728.0)) (type: double), ((- (_col0 * -3728)) % (-563 % (_col0 * -3728))) (type: int), ((UDFToDouble(_col0) / -3728.0) - _col4) (type: double), (- (_col0 * -3728)) (type: int), _col8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -578,10 +578,10 @@ STAGE PLANS: aggregations: var_pop(cbigint), count(), max(ctinyint), stddev_pop(csmallint), max(cint), stddev_samp(cdouble), count(ctinyint), avg(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: tinyint), _col3 (type: struct), _col4 (type: int), _col5 (type: struct), _col6 (type: bigint), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -590,14 +590,14 @@ STAGE PLANS: aggregations: var_pop(VALUE._col0), count(VALUE._col1), max(VALUE._col2), stddev_pop(VALUE._col3), max(VALUE._col4), stddev_samp(VALUE._col5), count(VALUE._col6), avg(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (- _col0) (type: double), (_col0 - (- _col0)) (type: double), _col1 (type: bigint), (CAST( _col1 AS decimal(19,0)) % 79.553) (type: decimal(5,3)), _col2 (type: tinyint), (UDFToDouble(_col1) - (- _col0)) (type: double), (- (- _col0)) (type: double), (-1.0 % (- _col0)) (type: double), _col1 (type: bigint), (- _col1) (type: bigint), _col3 (type: double), (- (- (- _col0))) (type: double), (762 * (- _col1)) (type: bigint), _col4 (type: int), (UDFToLong(_col2) + (762 * (- _col1))) (type: bigint), ((- _col0) + UDFToDouble(_col4)) (type: double), _col5 (type: double), ((- _col1) % _col1) (type: bigint), _col6 (type: bigint), _col7 (type: 
double), (-3728 % (UDFToLong(_col2) + (762 * (- _col1)))) (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -761,10 +761,10 @@ STAGE PLANS: aggregations: avg(ctinyint), max(cbigint), stddev_samp(cint), var_pop(cint), var_pop(cbigint), max(cfloat) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: float) Execution mode: vectorized Reducer 2 @@ -773,14 +773,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), max(VALUE._col1), stddev_samp(VALUE._col2), var_pop(VALUE._col3), var_pop(VALUE._col4), max(VALUE._col5) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (_col0 + 6981.0) (type: double), ((_col0 + 6981.0) + _col0) (type: double), _col1 (type: bigint), (((_col0 + 6981.0) + _col0) / _col0) (type: double), (- (_col0 + 6981.0)) (type: double), _col2 (type: double), (_col0 % (- (_col0 + 6981.0))) (type: double), _col3 (type: double), _col4 (type: double), (- _col1) (type: bigint), (UDFToDouble((- _col1)) / _col2) (type: double), _col5 (type: float), (_col4 * -26.28) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out b/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out index 936e0df..9f5c65c 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out @@ -136,10 +136,10 @@ STAGE PLANS: aggregations: sum(_col0), count(_col0), avg(_col0), std(_col0) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num 
rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: struct), _col3 (type: struct) Reducer 3 Reduce Operator Tree: @@ -147,10 +147,10 @@ STAGE PLANS: aggregations: sum(VALUE._col0), count(VALUE._col1), avg(VALUE._col2), std(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out index 381d05b..cb8aa0b 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out @@ -2992,10 +2992,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 7 Map Operator Tree: @@ -3010,10 +3010,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -3055,34 +3055,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: vectorized @@ -3091,34 +3091,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3193,10 +3193,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 7 Map Operator Tree: @@ -3211,10 +3211,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort 
order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -3258,34 +3258,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 8 Execution mode: vectorized @@ -3294,34 +3294,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 
Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 6 Vertex: Union 6 @@ -3399,10 +3399,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 5 Map Operator Tree: @@ -3433,10 +3433,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 11 Execution mode: vectorized @@ -3445,49 +3445,49 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: 
_col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Reducer 2 Execution mode: vectorized @@ -3539,49 +3539,49 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 5 Union 3 Vertex: Union 3 @@ -5204,10 +5204,10 @@ STAGE PLANS: aggregations: max(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: NONE value expressions: _col0 (type: string) Map 6 Map Operator Tree: @@ -5222,10 +5222,10 @@ STAGE PLANS: aggregations: min(ds) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Execution mode: vectorized @@ -5249,34 +5249,34 @@ STAGE PLANS: aggregations: max(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 7 Execution mode: vectorized @@ -5285,34 +5285,34 @@ STAGE PLANS: aggregations: min(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col0 is not null (type: boolean) - Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) 
outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Union 5 Vertex: Union 5 diff --git a/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out index fb583c2..d94bfd3 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out @@ -54,10 +54,10 @@ STAGE PLANS: aggregations: count(_col0), max(_col1), min(_col0), avg(_col2) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct) Execution mode: vectorized Map 3 @@ -84,10 +84,10 @@ STAGE PLANS: aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out index 3f23c77..f13d069 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_shufflejoin.q.out @@ -78,10 +78,10 @@ STAGE PLANS: aggregations: count(_col0), max(_col1), min(_col0), avg(_col2) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct) Reducer 3 Reduce Operator Tree: @@ -89,11 +89,11 @@ STAGE PLANS: aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, 
_col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: double) Reducer 4 Execution mode: vectorized @@ -101,10 +101,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out index a18d3cf..394c3a7 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out @@ -828,10 +828,10 @@ STAGE PLANS: aggregations: avg(ctimestamp1), variance(ctimestamp1), var_pop(ctimestamp1), var_samp(ctimestamp1), std(ctimestamp1), stddev(ctimestamp1), stddev_pop(ctimestamp1), stddev_samp(ctimestamp1) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reducer 2 @@ -840,14 +840,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: round(_col0, 0) (type: double), _col1 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col2 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col3 BETWEEN 9.20684592523616E19 AND 9.20684592523617E19 (type: boolean), round(_col4, 3) (type: double), round(_col5, 3) (type: double), round(_col6, 3) (type: double), round(_col7, 3) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/udaf_number_format.q.out b/ql/src/test/results/clientpositive/udaf_number_format.q.out index 624b42a..14ab23d 100644 --- a/ql/src/test/results/clientpositive/udaf_number_format.q.out +++ b/ql/src/test/results/clientpositive/udaf_number_format.q.out @@ -29,10 +29,10 @@ STAGE PLANS: aggregations: sum('a'), avg('a'), variance('a'), std('a') mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct) Reduce Operator Tree: Group By Operator diff --git a/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out b/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out index 3f5a540..580d98a 100644 --- a/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out +++ b/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out @@ -531,20 +531,20 @@ STAGE PLANS: aggregations: percentile_approx(_col0, 0.5) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: percentile_approx(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -592,20 +592,20 @@ STAGE PLANS: aggregations: percentile_approx(_col0, 0.5) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: percentile_approx(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/udf3.q.out b/ql/src/test/results/clientpositive/udf3.q.out index 8b2ad31..528b8c7 100644 --- a/ql/src/test/results/clientpositive/udf3.q.out +++ b/ql/src/test/results/clientpositive/udf3.q.out @@ -32,10 +32,10 @@ STAGE PLANS: aggregations: count(null), sum(null), avg(null), min(null), max(null) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: struct), _col3 (type: int), _col4 (type: int) Reduce Operator Tree: Group By Operator diff --git a/ql/src/test/results/clientpositive/udf4.q.out b/ql/src/test/results/clientpositive/udf4.q.out index d0b56da..5d263eb 100644 --- a/ql/src/test/results/clientpositive/udf4.q.out +++ b/ql/src/test/results/clientpositive/udf4.q.out @@ -79,10 +79,10 @@ STAGE PLANS: Select Operator expressions: 1 (type: decimal(1,0)), 2 (type: decimal(2,0)), -2 (type: decimal(2,0)), 1 (type: decimal(2,0)), 1 (type: decimal(2,0)), -2 (type: decimal(2,0)), 1.0 (type: double), null (type: double), 0.0 (type: double), 1 (type: decimal(2,0)), 2 (type: decimal(2,0)), -1 (type: decimal(2,0)), 1 (type: decimal(2,0)), rand(3) (type: double), 3 (type: int), -3 (type: int), 3 (type: int), -1 (type: int), -2 (type: int), -2 (type: tinyint), -2 (type: smallint), -2 (type: bigint), 0 (type: tinyint), 0 (type: smallint), 0 (type: int), 0 (type: bigint), 3 (type: tinyint), 3 (type: smallint), 3 (type: int), 3 (type: bigint), 2 (type: tinyint), 2 (type: smallint), 2 (type: int), 2 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33 - Statistics: Num rows: 1 Data size: 1240 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 1248 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 1240 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 1248 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/udf7.q.out b/ql/src/test/results/clientpositive/udf7.q.out index b15ccaf..12ebb2d 100644 --- a/ql/src/test/results/clientpositive/udf7.q.out +++ b/ql/src/test/results/clientpositive/udf7.q.out @@ -49,7 +49,7 @@ STAGE PLANS: Select Operator expressions: 1.098612288668 (type: double), null (type: double), null (type: 
double), 1.098612288668 (type: double), null (type: double), null (type: double), 1.584962500721 (type: double), null (type: double), null (type: double), 0.47712125472 (type: double), null (type: double), null (type: double), 1.584962500721 (type: double), null (type: double), null (type: double), null (type: double), -1.0 (type: double), 7.389056098931 (type: double), 8.0 (type: double), 8.0 (type: double), 0.125 (type: double), 8.0 (type: double), 2.0 (type: double), NaN (type: double), 1.0 (type: double), 1.0 (type: double), 8.0 (type: double), 8.0 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27
- Statistics: Num rows: 1 Data size: 136 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0),
diff --git a/ql/src/test/results/clientpositive/udf8.q.out b/ql/src/test/results/clientpositive/udf8.q.out
index 72cd434..d75cd78 100644
--- a/ql/src/test/results/clientpositive/udf8.q.out
+++ b/ql/src/test/results/clientpositive/udf8.q.out
@@ -49,20 +49,20 @@ STAGE PLANS:
aggregations: avg(c1), sum(c1), count(c1)
mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
aggregations: avg(VALUE._col0), sum(VALUE._col1), count(VALUE._col2)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/udf_case.q.out b/ql/src/test/results/clientpositive/udf_case.q.out
index 5cf458a..efa58a8 100644
--- a/ql/src/test/results/clientpositive/udf_case.q.out
+++ b/ql/src/test/results/clientpositive/udf_case.q.out
@@ -91,7 +91,7 @@ STAGE PLANS:
Select Operator
expressions: 2 (type: int), 5 (type: int), 15 (type: int), null (type: int), 20 (type: int), 24 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 500 Data size: 10000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 10004 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT CASE 1
diff --git a/ql/src/test/results/clientpositive/udf_coalesce.q.out b/ql/src/test/results/clientpositive/udf_coalesce.q.out
index a33efac..81e85fc 100644
--- a/ql/src/test/results/clientpositive/udf_coalesce.q.out
+++ b/ql/src/test/results/clientpositive/udf_coalesce.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
Select Operator
expressions: 1 (type: int), 1 (type: int), 2 (type: int), 1 (type: int), 3 (type: int), 4 (type: int), '1' (type: string), '1' (type: string), '2' (type: string), '1' (type: string), '3' (type: string), '4' (type: string), 1 (type: decimal(1,0)), 1 (type: decimal(1,0)), 2 (type: decimal(1,0)), 2 (type: decimal(1,0)), 2 (type: decimal(1,0)), null (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
- Statistics: Num rows: 500 Data size: 547000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 547004 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT COALESCE(1),
diff --git a/ql/src/test/results/clientpositive/udf_elt.q.out b/ql/src/test/results/clientpositive/udf_elt.q.out
index 18f3481..8edfa6d 100644
--- a/ql/src/test/results/clientpositive/udf_elt.q.out
+++ b/ql/src/test/results/clientpositive/udf_elt.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
Select Operator
expressions: 'defg' (type: string), 'cc' (type: string), 'abc' (type: string), '2' (type: string), '12345' (type: string), '123456789012' (type: string), '1.25' (type: string), '16.0' (type: string), null (type: string), null (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 500 Data size: 353500 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 353752 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT elt(2, 'abc', 'defg'),
diff --git a/ql/src/test/results/clientpositive/udf_greatest.q.out b/ql/src/test/results/clientpositive/udf_greatest.q.out
index 7c7e67a..7ba3758 100644
--- a/ql/src/test/results/clientpositive/udf_greatest.q.out
+++ b/ql/src/test/results/clientpositive/udf_greatest.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
Select Operator
expressions: 'c' (type: string), 'a' (type: string), 'AaA' (type: string), 'AAA' (type: string), '13' (type: string), '2' (type: string), '03' (type: string), '1' (type: string), null (type: double), null (type: double), null (type: double), null (type: double), null (type: double), null (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
- Statistics: Num rows: 500 Data size: 343000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 343048 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT GREATEST('a', 'b', 'c'),
diff --git a/ql/src/test/results/clientpositive/udf_if.q.out b/ql/src/test/results/clientpositive/udf_if.q.out
index c45483b..1114b69 100644
--- a/ql/src/test/results/clientpositive/udf_if.q.out
+++ b/ql/src/test/results/clientpositive/udf_if.q.out
@@ -41,7 +41,7 @@ STAGE PLANS:
Select Operator
expressions: 1 (type: int), '1' (type: string), 1 (type: int), 1 (type: int), null (type: int), 2 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 500 Data size: 50500 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 50504 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT IF(TRUE, 1, 2) AS COL1,
diff --git a/ql/src/test/results/clientpositive/udf_instr.q.out b/ql/src/test/results/clientpositive/udf_instr.q.out
index eafd8d6..50e65a1 100644
--- a/ql/src/test/results/clientpositive/udf_instr.q.out
+++ b/ql/src/test/results/clientpositive/udf_instr.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
Select Operator
expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), null (type: int), null (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
- Statistics: Num rows: 500 Data size: 22000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 22008 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT instr('abcd', 'abc'),
diff --git a/ql/src/test/results/clientpositive/udf_least.q.out b/ql/src/test/results/clientpositive/udf_least.q.out
index 2634d1a..d6e1a23 100644
--- a/ql/src/test/results/clientpositive/udf_least.q.out
+++ b/ql/src/test/results/clientpositive/udf_least.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
Select Operator
expressions: 'a' (type: string), 'B' (type: string), 'AAA' (type: string), 'A' (type: string), '11' (type: string), '11' (type: string), '01' (type: string), '01' (type: string), null (type: double), null (type: double), null (type: double), null (type: double), null (type: double), null (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
- Statistics: Num rows: 500 Data size: 343000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 343048 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT LEAST('a', 'b', 'c'),
diff --git a/ql/src/test/results/clientpositive/udf_locate.q.out b/ql/src/test/results/clientpositive/udf_locate.q.out
index 1bf2b3a..00136dc 100644
--- a/ql/src/test/results/clientpositive/udf_locate.q.out
+++ b/ql/src/test/results/clientpositive/udf_locate.q.out
@@ -66,7 +66,7 @@ STAGE PLANS:
Select Operator
expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 4 (type: int), 4 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), null (type: int), null (type: int), 0 (type: int), 0 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
- Statistics: Num rows: 500 Data size: 30000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 30008 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT locate('abc', 'abcd'),
diff --git a/ql/src/test/results/clientpositive/udf_trunc.q.out b/ql/src/test/results/clientpositive/udf_trunc.q.out
index 6be1196..4c9f76d 100644
--- a/ql/src/test/results/clientpositive/udf_trunc.q.out
+++ b/ql/src/test/results/clientpositive/udf_trunc.q.out
@@ -581,7 +581,7 @@ STAGE PLANS:
Select Operator
expressions: '2014-02-01' (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 514 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT
@@ -640,7 +640,7 @@ STAGE PLANS:
Select Operator
expressions: '2014-01-01' (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 514 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT
diff --git a/ql/src/test/results/clientpositive/udf_when.q.out b/ql/src/test/results/clientpositive/udf_when.q.out
index fce1603..7a5cc1d 100644
--- a/ql/src/test/results/clientpositive/udf_when.q.out
+++ b/ql/src/test/results/clientpositive/udf_when.q.out
@@ -91,7 +91,7 @@ STAGE PLANS:
Select Operator
expressions: 2 (type: int), 9 (type: int), 14 (type: int), null (type: int), 24 (type: int), null (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 500 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 8008 Basic stats: COMPLETE Column stats: COMPLETE
ListSink

PREHOOK: query: SELECT CASE
diff --git a/ql/src/test/results/clientpositive/udtf_stack.q.out b/ql/src/test/results/clientpositive/udtf_stack.q.out
index 95433a3..e2835fa 100644
--- a/ql/src/test/results/clientpositive/udtf_stack.q.out
+++ b/ql/src/test/results/clientpositive/udtf_stack.q.out
@@ -178,13 +178,13 @@ STAGE PLANS:
Select Operator
expressions: 1 (type: int), 'en' (type: string), 'dbpedia' (type: string), null (type: void)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 181 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: COMPLETE
UDTF Operator
- Statistics: Num rows: 1 Data size: 181 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: COMPLETE
function name: stack
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 181 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/union_remove_6_subq.q.out b/ql/src/test/results/clientpositive/union_remove_6_subq.q.out
index 59ccf49..b1e79e7 100644
--- a/ql/src/test/results/clientpositive/union_remove_6_subq.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_6_subq.q.out
@@ -345,10 +345,10 @@ STAGE PLANS:
aggregations: avg(_col0)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: struct)
TableScan
Union
@@ -357,10 +357,10 @@ STAGE PLANS:
aggregations: avg(_col0)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: struct)
Reduce Operator Tree:
Group By Operator
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out index 84258dc..19f8093 100644 --- a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out +++ b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out @@ -126,10 +126,10 @@ STAGE PLANS: aggregations: min(dc), max(dc), sum(dc), avg(dc) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: struct) Execution mode: vectorized Reduce Operator Tree: @@ -137,10 +137,10 @@ STAGE PLANS: aggregations: min(VALUE._col0), max(VALUE._col1), sum(VALUE._col2), avg(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 624 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out index 112cb03..3043a6c 100644 --- a/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out +++ b/ql/src/test/results/clientpositive/vector_aggregate_without_gby.q.out @@ -59,10 +59,10 @@ STAGE PLANS: aggregations: max(dt), max(greg_dt) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) Execution mode: vectorized Reduce Operator Tree: @@ -70,10 +70,10 @@ STAGE PLANS: aggregations: max(VALUE._col0), max(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_coalesce.q.out b/ql/src/test/results/clientpositive/vector_coalesce.q.out index e21dfcf..0101b66 100644 --- a/ql/src/test/results/clientpositive/vector_coalesce.q.out +++ b/ql/src/test/results/clientpositive/vector_coalesce.q.out @@ -24,31 +24,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 
12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1045942 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: cdouble is null (type: boolean) - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 265164 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: cstring1 (type: string), cint (type: int), cfloat (type: float), csmallint (type: smallint), COALESCE(null,cstring1,cint,cfloat,csmallint) (type: string) outputColumnNames: _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 819540 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: string), _col2 (type: int), _col3 (type: float), _col4 (type: smallint), _col5 (type: string) sort order: +++++ - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 819540 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: null (type: double), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: float), KEY.reducesinkkey3 (type: smallint), KEY.reducesinkkey4 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3114 Data size: 246572 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -108,31 +108,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 146792 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ctinyint is null (type: boolean) - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 37224 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: cdouble (type: double), cint (type: int), COALESCE(null,(cdouble + log2(cint)),0) (type: double) outputColumnNames: _col1, _col2, _col3 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 52844 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: double), _col2 (type: int), _col3 (type: double) sort order: +++ - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 52844 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: null (type: tinyint), KEY.reducesinkkey0 (type: 
double), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 27928 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -192,28 +192,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 110088 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (cfloat is null and cbigint is null) (type: boolean) - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 7092 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: null (type: float), null (type: bigint), 0.0 (type: float) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 3172 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -273,31 +273,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 983040 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (ctimestamp1 is not null or ctimestamp2 is not null) (type: boolean) - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 983040 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), COALESCE(ctimestamp1,ctimestamp2) (type: timestamp) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 
Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: timestamp) sort order: +++ - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: timestamp), KEY.reducesinkkey2 (type: timestamp) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 1474560 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -357,28 +357,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 110088 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (cfloat is null and cbigint is null) (type: boolean) - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 7092 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: null (type: float), null (type: bigint), null (type: float) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3072 Data size: 660491 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 790 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -436,20 +436,20 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc - Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12288 Data size: 110088 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: cbigint is null (type: boolean) - 
Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 27912 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: null (type: bigint), ctinyint (type: tinyint), COALESCE(null,ctinyint) (type: tinyint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 6144 Data size: 1320982 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3115 Data size: 21772 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/vector_decimal_precision.q.out index d65a1ce..c5f9e4d 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_precision.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_precision.q.out @@ -568,10 +568,10 @@ STAGE PLANS: aggregations: avg(dec), sum(dec) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) Execution mode: vectorized Reduce Operator Tree: @@ -579,10 +579,10 @@ STAGE PLANS: aggregations: avg(VALUE._col0), sum(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_udf.q.out b/ql/src/test/results/clientpositive/vector_decimal_udf.q.out index 5062cae..1fbc05a 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_udf.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_udf.q.out @@ -2406,20 +2406,20 @@ STAGE PLANS: aggregations: histogram_numeric(_col0, 3) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: array) Reduce Operator Tree: Group By Operator aggregations: histogram_numeric(VALUE._col0) mode: mergepartial outputColumnNames: 
_col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 720 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_elt.q.out b/ql/src/test/results/clientpositive/vector_elt.q.out index d494296..08ca167 100644 --- a/ql/src/test/results/clientpositive/vector_elt.q.out +++ b/ql/src/test/results/clientpositive/vector_elt.q.out @@ -104,10 +104,10 @@ STAGE PLANS: Select Operator expressions: 'defg' (type: string), 'cc' (type: string), 'abc' (type: string), '2' (type: string), '12345' (type: string), '123456789012' (type: string), '1.25' (type: string), '16.0' (type: string), null (type: string), null (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 12288 Data size: 8687616 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 8687784 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 1 - Statistics: Num rows: 1 Data size: 707 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 875 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: SELECT elt(2, 'abc', 'defg'), diff --git a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out index f7c8a08..ff16b3b 100644 --- a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out @@ -480,23 +480,23 @@ STAGE PLANS: alias: interval_arithmetic_1 Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month) outputColumnNames: _col0, _col1 - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -978,13 +978,13 @@ STAGE PLANS: Select Operator expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time) 
outputColumnNames: _col0, _col1 - Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 50 Data size: 1200 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 2 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_null_projection.q.out b/ql/src/test/results/clientpositive/vector_null_projection.q.out index 2e75731..779f787 100644 --- a/ql/src/test/results/clientpositive/vector_null_projection.q.out +++ b/ql/src/test/results/clientpositive/vector_null_projection.q.out @@ -58,10 +58,10 @@ STAGE PLANS: Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -100,53 +100,53 @@ STAGE PLANS: alias: a Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Union - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: null (type: void) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: void) sort order: + Map-reduce partition columns: _col0 (type: void) - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE TableScan alias: b Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Union - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator keys: null 
(type: void) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: void) sort order: + Map-reduce partition columns: _col0 (type: void) - Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: void) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_nvl.q.out b/ql/src/test/results/clientpositive/vector_nvl.q.out index 69712aa..8330810 100644 --- a/ql/src/test/results/clientpositive/vector_nvl.q.out +++ b/ql/src/test/results/clientpositive/vector_nvl.q.out @@ -203,10 +203,10 @@ STAGE PLANS: Select Operator expressions: null (type: void) outputColumnNames: _col0 - Statistics: Num rows: 12288 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 12288 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: SELECT nvl(null, null) as n diff --git a/ql/src/test/results/clientpositive/vector_udf1.q.out b/ql/src/test/results/clientpositive/vector_udf1.q.out index 92fa06f..0d8c206 100644 --- a/ql/src/test/results/clientpositive/vector_udf1.q.out +++ b/ql/src/test/results/clientpositive/vector_udf1.q.out @@ -1447,20 +1447,20 @@ STAGE PLANS: aggregations: compute_stats(_col0, 16), compute_stats(_col2, 16) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 984 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 984 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct) Reduce Operator Tree: Group By Operator aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 984 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 984 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1518,10 +1518,10 @@ STAGE PLANS: aggregations: min(c2), min(c4) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: varchar(20)) Execution mode: vectorized Reduce Operator Tree: @@ -1529,10 +1529,10 @@ STAGE PLANS: aggregations: min(VALUE._col0), min(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1590,10 +1590,10 @@ STAGE PLANS: aggregations: max(c2), max(c4) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: varchar(20)) Execution mode: vectorized Reduce Operator Tree: @@ -1601,10 +1601,10 @@ STAGE PLANS: aggregations: max(VALUE._col0), max(VALUE._col1) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vectorization_0.q.out b/ql/src/test/results/clientpositive/vectorization_0.q.out index 2d684f1..ac33721 100644 --- a/ql/src/test/results/clientpositive/vectorization_0.q.out +++ b/ql/src/test/results/clientpositive/vectorization_0.q.out @@ -241,10 +241,10 @@ STAGE PLANS: aggregations: avg(ctinyint), variance(ctinyint), var_pop(ctinyint), var_samp(ctinyint), std(ctinyint), stddev(ctinyint), stddev_pop(ctinyint), stddev_samp(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: 
struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reduce Operator Tree: @@ -252,7 +252,7 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -267,16 +267,16 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -554,10 +554,10 @@ STAGE PLANS: aggregations: avg(cbigint), variance(cbigint), var_pop(cbigint), var_samp(cbigint), std(cbigint), stddev(cbigint), stddev_pop(cbigint), stddev_samp(cbigint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reduce Operator Tree: @@ -565,7 +565,7 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -580,16 +580,16 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: double) sort order: + 
- Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -867,10 +867,10 @@ STAGE PLANS: aggregations: avg(cfloat), variance(cfloat), var_pop(cfloat), var_samp(cfloat), std(cfloat), stddev(cfloat), stddev_pop(cfloat), stddev_samp(cfloat) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized Reduce Operator Tree: @@ -878,7 +878,7 @@ STAGE PLANS: aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -893,16 +893,16 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE 
File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 636 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1020,10 +1020,10 @@ STAGE PLANS: aggregations: avg(cbigint), stddev_pop(cbigint), var_samp(cbigint), count(), sum(cfloat), min(ctinyint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: bigint), _col4 (type: double), _col5 (type: tinyint) Execution mode: vectorized Reduce Operator Tree: @@ -1031,14 +1031,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), stddev_pop(VALUE._col1), var_samp(VALUE._col2), count(VALUE._col3), sum(VALUE._col4), min(VALUE._col5) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (- _col0) (type: double), (-6432.0 + _col0) (type: double), _col1 (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) + (-6432.0 + _col0)) (type: double), _col2 (type: double), (- (-6432.0 + _col0)) (type: double), (-6432.0 + (- (-6432.0 + _col0))) (type: double), (- (-6432.0 + _col0)) (type: double), ((- (-6432.0 + _col0)) / (- (-6432.0 + _col0))) (type: double), _col3 (type: bigint), _col4 (type: double), (_col2 % _col1) (type: double), (- _col2) (type: double), ((- (-6432.0 + _col0)) * (- _col0)) (type: double), _col5 (type: tinyint), (- _col5) (type: tinyint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vectorization_pushdown.q.out b/ql/src/test/results/clientpositive/vectorization_pushdown.q.out index 6bbfed6..04780c4 100644 --- a/ql/src/test/results/clientpositive/vectorization_pushdown.q.out +++ b/ql/src/test/results/clientpositive/vectorization_pushdown.q.out @@ -26,10 +26,10 @@ STAGE PLANS: aggregations: avg(cbigint) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data 
size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct) Execution mode: vectorized Reduce Operator Tree: @@ -37,10 +37,10 @@ STAGE PLANS: aggregations: avg(VALUE._col0) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out index 46d4e6b..a101668 100644 --- a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out @@ -156,10 +156,10 @@ STAGE PLANS: aggregations: avg(cint), sum(cdouble), stddev_pop(cint), stddev_samp(csmallint), var_samp(cint), avg(cfloat), stddev_samp(cint), min(ctinyint), count(csmallint) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: tinyint), _col8 (type: bigint) Execution mode: vectorized Reduce Operator Tree: @@ -167,14 +167,14 @@ STAGE PLANS: aggregations: avg(VALUE._col0), sum(VALUE._col1), stddev_pop(VALUE._col2), stddev_samp(VALUE._col3), var_samp(VALUE._col4), avg(VALUE._col5), stddev_samp(VALUE._col6), min(VALUE._col7), count(VALUE._col8) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double), (_col0 + -3728.0) (type: double), (- (_col0 + -3728.0)) (type: double), (- (- (_col0 + -3728.0))) (type: double), ((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) (type: double), _col1 (type: double), (- _col0) (type: double), _col2 (type: double), (((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) * (- (- (_col0 + -3728.0)))) (type: double), _col3 (type: double), (- _col2) (type: double), (_col2 - (- (- (_col0 + -3728.0)))) (type: double), ((_col2 - (- (- (_col0 + -3728.0)))) * _col2) (type: double), _col4 (type: double), _col5 (type: double), (10.175 - _col4) (type: double), (- (10.175 - _col4)) (type: double), ((- _col2) / -563.0) (type: double), _col6 (type: double), (- ((- _col2) / -563.0)) (type: double), (_col0 / _col1) (type: double), _col7 (type: tinyint), _col8 (type: bigint), (UDFToDouble(_col7) / ((- _col2) / -563.0)) (type: double), (- (_col0 / _col1)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, 
_col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24
-            Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -361,10 +361,10 @@ STAGE PLANS:
                   aggregations: max(cint), var_pop(cbigint), stddev_pop(csmallint), max(cdouble), avg(ctinyint), min(cint), min(cdouble), stddev_samp(csmallint), var_samp(cint)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-                  Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     sort order: 
-                    Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: int), _col1 (type: struct), _col2 (type: struct), _col3 (type: double), _col4 (type: struct), _col5 (type: int), _col6 (type: double), _col7 (type: struct), _col8 (type: struct)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -372,14 +372,14 @@ STAGE PLANS:
           aggregations: max(VALUE._col0), var_pop(VALUE._col1), stddev_pop(VALUE._col2), max(VALUE._col3), avg(VALUE._col4), min(VALUE._col5), min(VALUE._col6), stddev_samp(VALUE._col7), var_samp(VALUE._col8)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: int), (UDFToDouble(_col0) / -3728.0) (type: double), (_col0 * -3728) (type: int), _col1 (type: double), (- (_col0 * -3728)) (type: int), _col2 (type: double), (-563 % (_col0 * -3728)) (type: int), (_col1 / _col2) (type: double), (- _col2) (type: double), _col3 (type: double), _col4 (type: double), (_col2 - 10.175) (type: double), _col5 (type: int), (UDFToDouble((_col0 * -3728)) % (_col2 - 10.175)) (type: double), (- _col3) (type: double), _col6 (type: double), (_col3 % -26.28) (type: double), _col7 (type: double), (- (UDFToDouble(_col0) / -3728.0)) (type: double), ((- (_col0 * -3728)) % (-563 % (_col0 * -3728))) (type: int), ((UDFToDouble(_col0) / -3728.0) - _col4) (type: double), (- (_col0 * -3728)) (type: int), _col8 (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22
-            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -558,10 +558,10 @@ STAGE PLANS:
                   aggregations: var_pop(cbigint), count(), max(ctinyint), stddev_pop(csmallint), max(cint), stddev_samp(cdouble), count(ctinyint), avg(ctinyint)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-                  Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     sort order: 
-                    Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: tinyint), _col3 (type: struct), _col4 (type: int), _col5 (type: struct), _col6 (type: bigint), _col7 (type: struct)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -569,14 +569,14 @@ STAGE PLANS:
           aggregations: var_pop(VALUE._col0), count(VALUE._col1), max(VALUE._col2), stddev_pop(VALUE._col3), max(VALUE._col4), stddev_samp(VALUE._col5), count(VALUE._col6), avg(VALUE._col7)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: double), (- _col0) (type: double), (_col0 - (- _col0)) (type: double), _col1 (type: bigint), (CAST( _col1 AS decimal(19,0)) % 79.553) (type: decimal(5,3)), _col2 (type: tinyint), (UDFToDouble(_col1) - (- _col0)) (type: double), (- (- _col0)) (type: double), (-1.0 % (- _col0)) (type: double), _col1 (type: bigint), (- _col1) (type: bigint), _col3 (type: double), (- (- (- _col0))) (type: double), (762 * (- _col1)) (type: bigint), _col4 (type: int), (UDFToLong(_col2) + (762 * (- _col1))) (type: bigint), ((- _col0) + UDFToDouble(_col4)) (type: double), _col5 (type: double), ((- _col1) % _col1) (type: bigint), _col6 (type: bigint), _col7 (type: double), (-3728 % (UDFToLong(_col2) + (762 * (- _col1)))) (type: bigint)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21
-            Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,10 +734,10 @@ STAGE PLANS:
                   aggregations: avg(ctinyint), max(cbigint), stddev_samp(cint), var_pop(cint), var_pop(cbigint), max(cfloat)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     sort order: 
-                    Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: float)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -745,14 +745,14 @@ STAGE PLANS:
           aggregations: avg(VALUE._col0), max(VALUE._col1), stddev_samp(VALUE._col2), var_pop(VALUE._col3), var_pop(VALUE._col4), max(VALUE._col5)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: double), (_col0 + 6981.0) (type: double), ((_col0 + 6981.0) + _col0) (type: double), _col1 (type: bigint), (((_col0 + 6981.0) + _col0) / _col0) (type: double), (- (_col0 + 6981.0)) (type: double), _col2 (type: double), (_col0 % (- (_col0 + 6981.0))) (type: double), _col3 (type: double), _col4 (type: double), (- _col1) (type: bigint), (UDFToDouble((- _col1)) / _col2) (type: double), _col5 (type: float), (_col4 * -26.28) (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-            Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out b/ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
index 64e4b01..f8ae962 100644
--- a/ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_distinct_gby.q.out
@@ -111,10 +111,10 @@ STAGE PLANS:
           aggregations: sum(DISTINCT KEY._col0:0._col0), count(DISTINCT KEY._col0:1._col0), avg(DISTINCT KEY._col0:2._col0), std(DISTINCT KEY._col0:3._col0)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 180 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 180 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
index 2ecf44e..af5e419 100644
--- a/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_mapjoin.q.out
@@ -68,10 +68,10 @@ STAGE PLANS:
                     aggregations: count(_col0), max(_col1), min(_col0), avg(_col2)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       sort order: 
-                      Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct)
       Execution mode: vectorized
       Local Work:
@@ -81,10 +81,10 @@ STAGE PLANS:
           aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
            table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
index 2b13701..8077ff2 100644
--- a/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_shufflejoin.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
               aggregations: count(_col0), max(_col1), min(_col0), avg(_col2)
               mode: hash
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
                 table:
@@ -81,14 +81,14 @@ STAGE PLANS:
           TableScan
             Reduce Output Operator
               sort order: 
-              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct)
       Reduce Operator Tree:
         Group By Operator
          aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), avg(VALUE._col3)
          mode: mergepartial
          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
@@ -103,16 +103,16 @@ STAGE PLANS:
             Reduce Output Operator
               key expressions: _col0 (type: bigint)
               sort order: +
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: double)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 76570bc..58af3e2 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -774,10 +774,10 @@ STAGE PLANS:
                 aggregations: avg(ctimestamp1), variance(ctimestamp1), var_pop(ctimestamp1), var_samp(ctimestamp1), std(ctimestamp1), stddev(ctimestamp1), stddev_pop(ctimestamp1), stddev_samp(ctimestamp1)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -785,14 +785,14 @@ STAGE PLANS:
           aggregations: avg(VALUE._col0), variance(VALUE._col1), var_pop(VALUE._col2), var_samp(VALUE._col3), std(VALUE._col4), stddev(VALUE._col5), stddev_pop(VALUE._col6), stddev_samp(VALUE._col7)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: round(_col0, 0) (type: double), _col1 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col2 BETWEEN 8.97077295279421E19 AND 8.97077295279422E19 (type: boolean), _col3 BETWEEN 9.20684592523616E19 AND 9.20684592523617E19 (type: boolean), round(_col4, 3) (type: double), round(_col5, 3) (type: double), round(_col6, 3) (type: double), round(_col7, 3) (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-- 
1.7.12.4 (Apple Git-37)