Index: conf/hive-default.xml.template =================================================================== --- conf/hive-default.xml.template (revision 1613528) +++ conf/hive-default.xml.template (working copy) @@ -1,5 +1,7 @@ - +--> + @@ -1703,6 +1706,11 @@ Whether to push predicates down into storage handlers. Ignored when hive.optimize.ppd is false. + hive.optimize.constant.propagation + true + Whether to enable constant propagation optimizer + + hive.optimize.metadataonly true Index: hbase-handler/src/test/results/positive/ppd_key_ranges.q.out =================================================================== --- hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (revision 1613528) +++ hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (working copy) @@ -191,7 +191,7 @@ predicate: (((key >= 9) and (key < 17)) and (key = 11)) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 11 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (revision 1613528) +++ hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (working copy) @@ -193,10 +193,10 @@ Map Operator Tree: TableScan alias: hbase_pushdown - filterExpr: (key >= UDFToString((40 + 50))) (type: boolean) + filterExpr: (key >= '90') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key >= UDFToString((40 + 50))) (type: boolean) + predicate: (key >= '90') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) Index: hbase-handler/src/test/results/positive/hbase_pushdown.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_pushdown.q.out (revision 1613528) +++ hbase-handler/src/test/results/positive/hbase_pushdown.q.out (working copy) @@ -43,7 +43,7 @@ predicate: (key = 90) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 90 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -235,7 +235,7 @@ predicate: (((key = 80) and (key = 90)) and (value like '%90%')) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 90 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -398,7 +398,7 @@ predicate: (key = 90) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 90 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: 
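For context, a minimal HiveQL sketch (not part of the patch) of how the new flag added above is exercised. The table and column names follow the hbase_pushdown fixture used in the golden files above; the quoted EXPLAIN fragments are taken from the before/after lines of hbase_pushdown.q.out in this patch, and the default value (true) comes from the hive-default.xml.template / HiveConf.java hunks.

  -- Sketch only: toggling the new constant propagation optimizer flag.
  set hive.optimize.constant.propagation=true;   -- default per this patch

  EXPLAIN
  SELECT key, value FROM hbase_pushdown WHERE key = 90;
  -- With the optimizer enabled, the Select Operator in the plan carries the
  -- folded constant, i.e. "expressions: 90 (type: int), value (type: string)",
  -- as in the updated golden file above. With
  --   set hive.optimize.constant.propagation=false;
  -- the plan keeps "expressions: key (type: int), value (type: string)",
  -- matching the pre-patch output.
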
common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1613528) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -983,6 +983,8 @@ "Whether to transitively replicate predicate filters over equijoin conditions."), HIVEPPDREMOVEDUPLICATEFILTERS("hive.ppd.remove.duplicatefilters", true, "Whether to push predicates down into storage handlers. Ignored when hive.optimize.ppd is false."), + // Constant propagation optimizer + HIVEOPTCONSTANTPROPAGATION("hive.optimize.constant.propagation", true, "Whether to enable constant propagation optimizer"), HIVEMETADATAONLYQUERIES("hive.optimize.metadataonly", true, ""), HIVENULLSCANOPTIMIZE("hive.optimize.null.scan", true, "Dont scan relations which are guaranteed to not generate any rows"), HIVEOPTPPD_STORAGE("hive.optimize.ppd.storage", true, @@ -2250,7 +2252,6 @@ return hiveServer2SiteUrl; } - /** * @return the user name set in hadoop.job.ugi param or the current user from System * @throws IOException Index: contrib/src/test/results/clientpositive/udf_example_add.q.out =================================================================== --- contrib/src/test/results/clientpositive/udf_example_add.q.out (revision 1613528) +++ contrib/src/test/results/clientpositive/udf_example_add.q.out (working copy) @@ -36,7 +36,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: example_add(1, 2) (type: int), example_add(1, 2, 3) (type: int), example_add(1, 2, 3, 4) (type: int), example_add(1.1, 2.2) (type: double), example_add(1.1, 2.2, 3.3) (type: double), example_add(1.1, 2.2, 3.3, 4.4) (type: double), example_add(1, 2, 3, 4.4) (type: double) + expressions: 3 (type: int), 6 (type: int), 10 (type: int), 3.3000000000000003 (type: double), 6.6 (type: double), 11.0 (type: double), 10.4 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: contrib/src/test/results/clientpositive/udf_example_format.q.out =================================================================== --- contrib/src/test/results/clientpositive/udf_example_format.q.out (revision 1613528) +++ contrib/src/test/results/clientpositive/udf_example_format.q.out (working copy) @@ -30,7 +30,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: example_format('abc') (type: string), example_format('%1$s', 1.1) (type: string), example_format('%1$s %2$e', 1.1, 1.2) (type: string), example_format('%1$x %2$o %3$d', 10, 10, 10) (type: string) + expressions: 'abc' (type: string), '1.1' (type: string), '1.1 1.200000e+00' (type: string), 'a 12 10' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/join14_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/join14_hadoop20.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join14_hadoop20.q.out (working copy) @@ -30,7 +30,7 @@ alias: srcpart Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key > 100) (type: boolean) + predicate: ((key > 100) 
and key is not null) (type: boolean) Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: string) @@ -42,7 +42,7 @@ alias: src Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key > 100) (type: boolean) + predicate: ((key > 100) and key is not null) (type: boolean) Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: string) Index: ql/src/test/results/clientpositive/udf9.q.out =================================================================== --- ql/src/test/results/clientpositive/udf9.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf9.q.out (working copy) @@ -30,17 +30,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: datediff('2008-12-31', '2009-01-01') (type: int), datediff('2008-03-01', '2008-02-28') (type: int), datediff('2007-03-01', '2007-01-28') (type: int), datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') (type: int), date_add('2008-12-31', 1) (type: string), date_add('2008-12-31', 365) (type: string), date_add('2008-02-28', 2) (type: string), date_add('2009-02-28', 2) (type: string), date_add('2007-02-28', 365) (type: string), date_add('2007-02-28 23:59:59', 730) (type: string), date_sub('2009-01-01', 1) (type: string), date_sub('2009-01-01', 365) (type: string), date_sub('2008-02-28', 2) (type: string), date_sub('2009-02-28', 2) (type: string), date_sub('2007-02-28', 365) (type: string), date_sub('2007-02-28 01:12:34', 730) (type: string) + expressions: -1 (type: int), 2 (type: int), 32 (type: int), -1 (type: int), '2009-01-01' (type: string), '2009-12-31' (type: string), '2008-03-01' (type: string), '2009-03-02' (type: string), '2008-02-28' (type: string), '2009-02-27' (type: string), '2008-12-31' (type: string), '2008-01-02' (type: string), '2008-02-26' (type: string), '2009-02-26' (type: string), '2006-02-28' (type: string), '2005-02-28' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out (working copy) @@ -28,26 +28,26 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 51 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'no_such_value') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 2849 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: 'no_such_value' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 2849 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 2849 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reduce Operator Tree: Extract - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 2849 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 2849 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy) @@ -14,28 +14,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 10) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '10' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '10' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 
2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -73,49 +72,44 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col4 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (_col0 = 20) (type: boolean) - Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + 0 {VALUE._col0} + 1 + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Stage: Stage-2 Map Reduce @@ -125,16 +119,15 @@ key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string), _col2 (type: string) + 
Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -171,28 +164,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 10) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '10' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '10' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -230,40 +222,38 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: 
NONE TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col4 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -279,16 +269,15 @@ key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string), _col2 (type: string) + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/index_auto_update.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_update.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_auto_update.q.out (working copy) @@ -259,7 +259,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) + expressions: '86' (type: string), val (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/input6.q.out 
=================================================================== --- ql/src/test/results/clientpositive/input6.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input6.q.out (working copy) @@ -34,7 +34,7 @@ predicate: key is null (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: null (type: void), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/annotate_stats_select.q.out =================================================================== --- ql/src/test/results/clientpositive/annotate_stats_select.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/annotate_stats_select.q.out (working copy) @@ -1699,7 +1699,7 @@ Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE GatherStats: false Select Operator - expressions: unbase64('0xe23') (type: binary) + expressions: D317B6 (type: binary) outputColumnNames: _col0 Statistics: Num rows: 2 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -1826,7 +1826,7 @@ Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE GatherStats: false Select Operator - expressions: UDFToByte('1') (type: tinyint), UDFToShort('20') (type: smallint) + expressions: 1 (type: tinyint), 20 (type: smallint) outputColumnNames: _col0, _col1 Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -1949,7 +1949,7 @@ Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE GatherStats: false Select Operator - expressions: CAST( '1970-12-31 15:59:58.174' AS TIMESTAMP) (type: timestamp) + expressions: 1970-12-31 15:59:58.174 (type: timestamp) outputColumnNames: _col0 Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -2072,7 +2072,7 @@ Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE GatherStats: false Select Operator - expressions: CAST( '1970-12-31 15:59:58.174' AS DATE) (type: date) + expressions: null (type: void) outputColumnNames: _col0 Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/udf4.q.out =================================================================== --- ql/src/test/results/clientpositive/udf4.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf4.q.out (working copy) @@ -76,7 +76,7 @@ alias: dest1 Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: round(1.0) (type: double), round(1.5) (type: double), round((- 1.5)) (type: double), floor(1.0) (type: bigint), floor(1.5) (type: bigint), floor((- 1.5)) (type: bigint), sqrt(1.0) (type: double), sqrt((- 1.0)) (type: double), sqrt(0.0) (type: double), ceil(1.0) (type: bigint), ceil(1.5) (type: bigint), ceil((- 1.5)) (type: bigint), ceil(1.0) (type: bigint), rand(3) (type: double), 3 (type: int), (- 3) (type: int), (1 + 2) (type: int), (1 + (- 2)) (type: int), (~ 1) (type: int), (~ UDFToByte(1)) (type: tinyint), (~ UDFToShort(1)) (type: smallint), (~ UDFToLong(1)) (type: bigint), (UDFToByte(1) & UDFToByte(2)) (type: tinyint), (UDFToShort(1) & UDFToShort(2)) (type: smallint), (1 & 2) (type: int), 
(UDFToLong(1) & UDFToLong(2)) (type: bigint), (UDFToByte(1) | UDFToByte(2)) (type: tinyint), (UDFToShort(1) | UDFToShort(2)) (type: smallint), (1 | 2) (type: int), (UDFToLong(1) | UDFToLong(2)) (type: bigint), (UDFToByte(1) ^ UDFToByte(3)) (type: tinyint), (UDFToShort(1) ^ UDFToShort(3)) (type: smallint), (1 ^ 3) (type: int), (UDFToLong(1) ^ UDFToLong(3)) (type: bigint) + expressions: 1.0 (type: double), 2.0 (type: double), -2.0 (type: double), 1 (type: bigint), 1 (type: bigint), -2 (type: bigint), 1.0 (type: double), null (type: void), 0.0 (type: double), 1 (type: bigint), 2 (type: bigint), -1 (type: bigint), 1 (type: bigint), rand(3) (type: double), 3 (type: int), -3 (type: int), 3 (type: int), -1 (type: int), -2 (type: int), -2 (type: tinyint), -2 (type: smallint), -2 (type: bigint), 0 (type: tinyint), 0 (type: smallint), 0 (type: int), 0 (type: bigint), 3 (type: tinyint), 3 (type: smallint), 3 (type: int), 3 (type: bigint), 2 (type: tinyint), 2 (type: smallint), 2 (type: int), 2 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33 Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/join38.q.out =================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -63,17 +63,17 @@ a TableScan alias: a - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 111)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator condition expressions: 0 {value} 1 {col5} keys: - 0 key (type: string) - 1 col11 (type: string) + 0 '111' (type: string) + 1 '111' (type: string) Stage: Stage-1 Map Reduce @@ -91,25 +91,25 @@ 0 {value} 1 {col5} keys: - 0 key (type: string) - 1 col11 (type: string) + 0 '111' (type: string) + 1 '111' (type: string) outputColumnNames: _col1, _col9 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col9 (type: string) outputColumnNames: _col1, _col9 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string), _col9 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column 
stats: NONE value expressions: _col2 (type: bigint) Local Work: Map Reduce Local Work @@ -119,14 +119,14 @@ keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 745 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 745 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 745 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (working copy) @@ -43,17 +43,14 @@ TableScan alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE - Filter Operator - predicate: true is not null (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Select Operator - expressions: null is null (type: boolean), 1 is not null (type: boolean), 'my string' is not null (type: boolean) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Limit - Number of rows: 1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - ListSink + Select Operator + expressions: null is null (type: boolean), true (type: boolean), true (type: boolean) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + Limit + Number of rows: 1 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + ListSink PREHOOK: query: SELECT NULL IS NULL, 1 IS NOT NULL, Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -1694,11 +1694,11 @@ predicate: (key = 14) (type: boolean) Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) - outputColumnNames: _col0, _col1 + expressions: value (type: string) + outputColumnNames: _col1 Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: int), _col1 (type: string) + key expressions: 14 (type: int), _col1 (type: string) sort order: ++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1802,7 +1802,7 @@ Needs Tagging: false Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + expressions: 14 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 20 Data 
size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -4637,7 +4637,7 @@ predicate: (key = 2) (type: boolean) Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 2 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -4676,7 +4676,7 @@ predicate: (key = 3) (type: boolean) Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: 3 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -5438,20 +5438,20 @@ Map Operator Tree: TableScan alias: srcpart - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = 11) (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string), ds (type: string), hr (type: string) + outputColumnNames: _col1, _col2, _col3 + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string) + key expressions: '11' (type: string), _col2 (type: string), _col3 (type: string) sort order: +++ - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: string) auto parallelism: false @@ -5556,15 +5556,15 @@ Needs Tagging: false Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string) + expressions: '11' (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -5661,20 +5661,20 @@ Map Operator Tree: TableScan alias: srcpart - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = 11) (type: boolean) - Statistics: Num 
rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string), ds (type: string), hr (type: string) + outputColumnNames: _col1, _col2, _col3 + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string) + key expressions: '11' (type: string), _col2 (type: string), _col3 (type: string) sort order: +++ - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: string) auto parallelism: false @@ -5779,15 +5779,15 @@ Needs Tagging: false Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string) + expressions: '11' (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat Index: ql/src/test/results/clientpositive/udf_sign.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_sign.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_sign.q.out (working copy) @@ -17,7 +17,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: sign(0) (type: double) + expressions: 0.0 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -81,7 +81,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: sign(0) (type: double) + expressions: 0.0 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/index_auto_partitioned.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_partitioned.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_auto_partitioned.q.out (working copy) @@ -87,17 +87,17 @@ TableScan alias: srcpart filterExpr: ((key = 86) and (ds = '2008-04-09')) (type: boolean) - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) 
(type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '86' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/macro.q.out =================================================================== --- ql/src/test/results/clientpositive/macro.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/macro.q.out (working copy) @@ -29,7 +29,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: SIGMOID(2) (type: double) + expressions: 0.8807970779778823 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit @@ -74,7 +74,7 @@ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE GatherStats: false Select Operator - expressions: SIGMOID(2) (type: double) + expressions: 0.8807970779778823 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit @@ -119,7 +119,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (FIXED_NUMBER() + 1) (type: int) + expressions: 2 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit @@ -165,7 +165,7 @@ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE GatherStats: false Select Operator - expressions: (FIXED_NUMBER() + 1) (type: int) + expressions: 2 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit @@ -237,7 +237,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: SIMPLE_ADD(1, 9) (type: int) + expressions: 10 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit @@ -283,7 +283,7 @@ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE GatherStats: false Select Operator - expressions: SIMPLE_ADD(1, 9) (type: int) + expressions: 10 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/constprog_type.q.out =================================================================== --- ql/src/test/results/clientpositive/constprog_type.q.out (revision 0) +++ ql/src/test/results/clientpositive/constprog_type.q.out (revision 0) @@ -0,0 +1,123 @@ +PREHOOK: query: CREATE TABLE dest1(d date, t timestamp) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +POSTHOOK: query: CREATE TABLE dest1(d date, t 
timestamp) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dest1 +PREHOOK: query: EXPLAIN +INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5 + Stage-4 + Stage-0 depends on stages: Stage-4, Stage-3, Stage-6 + Stage-2 depends on stages: Stage-0 + Stage-3 + Stage-5 + Stage-6 depends on stages: Stage-5 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + Select Operator + expressions: 2013-11-17 (type: date), 2011-04-29 20:46:56.4485 (type: timestamp) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + + Stage: Stage-7 + Conditional Operator + + Stage: Stage-4 + Move Operator + files: + hdfs directory: true +#### A masked pattern was here #### + + Stage: Stage-0 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + + Stage: Stage-2 + Stats-Aggr Operator + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + + Stage: Stage-6 + Move Operator + files: + hdfs directory: true +#### A masked pattern was here #### + +PREHOOK: query: INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@dest1 +POSTHOOK: query: INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.d EXPRESSION [] +POSTHOOK: Lineage: dest1.t EXPRESSION [] +PREHOOK: query: SELECT * FROM dest1 +PREHOOK: type: QUERY +PREHOOK: Input: default@dest1 +#### A masked pattern was here #### +POSTHOOK: query: 
SELECT * FROM dest1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@dest1 +#### A masked pattern was here #### +2013-11-17 2011-04-29 20:46:56.4485 Index: ql/src/test/results/clientpositive/cluster.q.out =================================================================== --- ql/src/test/results/clientpositive/cluster.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/cluster.q.out (working copy) @@ -14,28 +14,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 10) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '10' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '10' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -72,28 +71,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), 
VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -130,28 +128,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -188,28 +185,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -246,28 +242,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -304,28 +299,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key 
expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -362,28 +356,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string) + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -420,28 +413,27 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 20) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: 
NONE + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -479,40 +471,38 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col4 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string) + outputColumnNames: 
_col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -528,16 +518,15 @@ key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string), _col2 (type: string) + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -575,41 +564,39 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} {VALUE._col0} - outputColumnNames: _col0, _col1, _col4, _col5 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 {VALUE._col0} + outputColumnNames: _col1, _col5 + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - 
Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string), _col5 (type: string) + outputColumnNames: _col1, _col3 + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -625,16 +612,16 @@ key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string) + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE + value expressions: _col3 (type: string) Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string), VALUE._col2 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -672,41 +659,39 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} {VALUE._col0} - outputColumnNames: _col0, _col1, _col4, _col5 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 {VALUE._col0} + 
outputColumnNames: _col1, _col5 + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string), _col5 (type: string) + outputColumnNames: _col1, _col3 + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -719,19 +704,18 @@ Map Operator Tree: TableScan Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string) + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col3 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string), '20' (type: string), VALUE._col2 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -769,40 +753,38 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: 
value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col4 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string), '20' (type: string) + outputColumnNames: _col1, _col2 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -815,19 +797,18 @@ Map Operator Tree: TableScan Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string) + expressions: '20' (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/list_bucket_dml_9.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_9.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_9.q.out (working copy) @@ -820,7 +820,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/select_unquote_or.q.out =================================================================== --- ql/src/test/results/clientpositive/select_unquote_or.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/select_unquote_or.q.out (working copy) @@ -58,7 +58,7 @@ alias: npe_test Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((ds > ((2012 - 11) - 31)) or (ds < ((2012 - 12) - 15))) (type: boolean) + predicate: ((ds > 1970) or (ds < 1985)) (type: boolean) Statistics: Num rows: 332 Data size: 3526 
Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string), ds (type: string) Index: ql/src/test/results/clientpositive/index_auto_file_format.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_file_format.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_auto_file_format.q.out (working copy) @@ -73,17 +73,17 @@ TableScan alias: src filterExpr: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '86' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -189,17 +189,17 @@ TableScan alias: src filterExpr: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '86' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -155,7 +155,7 @@ predicate: ((key < 100) and (ds = '2008-04-08')) (type: boolean) Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), ds (type: string) + expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), '2008-04-08' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE File Output Operator 
@@ -189,7 +189,7 @@ predicate: ((key < 100) and (ds = '2008-04-09')) (type: boolean) Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), ds (type: string) + expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), '2008-04-09' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/create_view.q.out =================================================================== --- ql/src/test/results/clientpositive/create_view.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/create_view.q.out (working copy) @@ -181,17 +181,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 18) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '18' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/infer_const_type.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_const_type.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/infer_const_type.q.out (working copy) @@ -56,12 +56,12 @@ Map Operator Tree: TableScan alias: infertypes - Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 3 Data size: 117 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((((((ti = 127) and (si = 32767)) and (i = 12345)) and (bi = -12345)) and (fl = 906.0)) and (db = -307.0)) and (str = 1234)) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string) + expressions: 127 (type: tinyint), 32767 (type: smallint), 12345 (type: int), -12345 (type: bigint), 906.0 (type: float), -307.0 (type: double), '1234' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -135,7 +135,7 @@ alias: infertypes Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE Filter Operator - predicate: (((((false or false) or false) or false) or false) or false) (type: boolean) + predicate: false (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select 
Operator expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string) @@ -199,7 +199,7 @@ alias: infertypes Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE Filter Operator - predicate: ((false or false) or false) (type: boolean) + predicate: false (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string) @@ -253,12 +253,12 @@ Map Operator Tree: TableScan alias: infertypes - Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 3 Data size: 117 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((ti < 127.0) and (i > 100.0)) and (str = 1.57)) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string) + expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), '1.57' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_4.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_4.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_4.q.out (working copy) @@ -820,7 +820,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_12.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_12.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_12.q.out (working copy) @@ -331,7 +331,7 @@ predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean) Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 (type: string), col5 (type: string), ds (type: string), hr (type: string) + expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -489,7 +489,7 @@ predicate: ((col2 = '382') and (col4 = 'val_382')) (type: boolean) Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 
(type: string), col5 (type: string), ds (type: string), hr (type: string) + expressions: col1 (type: string), '382' (type: string), col3 (type: string), 'val_382' (type: string), col5 (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_radians.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_radians.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_radians.q.out (working copy) @@ -17,7 +17,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: radians(57.2958) (type: double) + expressions: 1.000000357564167 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -73,7 +73,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: radians(57.2958) (type: double) + expressions: 1.000000357564167 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/subquery_notin_having.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_notin_having.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_notin_having.q.out (working copy) @@ -176,7 +176,7 @@ outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 69 Data size: 7032 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col2 is null) (type: boolean) + predicate: _col2 is null (type: boolean) Statistics: Num rows: 34 Data size: 3465 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: bigint) @@ -354,7 +354,7 @@ outputColumnNames: _col0, _col1, _col4 Statistics: Num rows: 34 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col4 is null) (type: boolean) + predicate: _col4 is null (type: boolean) Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: double) @@ -646,7 +646,7 @@ outputColumnNames: _col0, _col1, _col3 Statistics: Num rows: 34 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col3 is null) (type: boolean) + predicate: _col3 is null (type: boolean) Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: double) @@ -670,7 +670,7 @@ predicate: p_mfgr is null (type: boolean) Statistics: Num rows: 14 Data size: 1531 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: p_mfgr (type: string), p_retailprice (type: double) + expressions: null (type: void), p_retailprice (type: double) outputColumnNames: p_mfgr, p_retailprice Statistics: Num rows: 14 Data size: 1531 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out =================================================================== --- 
ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out (working copy) @@ -240,7 +240,7 @@ predicate: (x = 484) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x (type: int) + expressions: 484 (type: int) outputColumnNames: _col0 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -757,7 +757,7 @@ predicate: (x = 484) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x (type: int) + expressions: 484 (type: int) outputColumnNames: x Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out (working copy) @@ -169,7 +169,7 @@ predicate: (value = 'val_484') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: key (type: string), 'val_484' (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -368,7 +368,7 @@ predicate: (key = '406') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '406' (type: string) outputColumnNames: _col0 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_like.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_like.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_like.q.out (working copy) @@ -33,14 +33,14 @@ Processor Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: ('_%_' like '%\_\%\_%') (type: boolean), ('__' like '%\_\%\_%') (type: boolean), ('%%_%_' like '%\_\%\_%') (type: boolean), ('%_%_%' like '%\%\_\%') (type: boolean), ('_%_' like '\%\_%') (type: boolean), ('%__' like '__\%%') (type: boolean), ('_%' like '\_\%\_\%%') (type: boolean), ('_%' like '\_\%_%') (type: boolean), ('%_' like '\%\_') (type: boolean), ('ab' like '\%\_') (type: boolean), ('ab' like '_a%') (type: boolean), ('ab' like 'a') (type: boolean), ('ab' like '') (type: boolean), ('' like '') (type: boolean) + expressions: true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean) 
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: SELECT '_%_' LIKE '%\_\%\_%', '__' LIKE '%\_\%\_%', '%%_%_' LIKE '%\_\%\_%', '%_%_%' LIKE '%\%\_\%', Index: ql/src/test/results/clientpositive/udf_hour.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_hour.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_hour.q.out (working copy) @@ -32,14 +32,14 @@ Processor Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: hour('2009-08-07 13:14:15') (type: int), hour('13:14:15') (type: int), hour('2009-08-07') (type: int) + expressions: 13 (type: int), 13 (type: int), null (type: void) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: SELECT hour('2009-08-07 13:14:15'), hour('13:14:15'), hour('2009-08-07') Index: ql/src/test/results/clientpositive/query_result_fileformat.q.out =================================================================== --- ql/src/test/results/clientpositive/query_result_fileformat.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/query_result_fileformat.q.out (working copy) @@ -53,7 +53,7 @@ predicate: (key = 'key1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: 'key1' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -124,7 +124,7 @@ predicate: (key = 'key1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: 'key1' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/groupby_sort_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/groupby_sort_1.q.out (working copy) @@ -470,6 +470,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -1873,6 +1874,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col3 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2093,6 +2095,7 @@ Statistics: Num rows: 
6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2359,6 +2362,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -3139,6 +3143,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -3760,52 +3765,62 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 0 - value expressions: _col0 (type: string), _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: bigint) + auto parallelism: false TableScan alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce 
Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 1 - value expressions: _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -3865,20 +3880,20 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} - 1 {VALUE._col1} + 0 {KEY.reducesinkkey0} {VALUE._col0} + 1 {VALUE._col0} outputColumnNames: _col0, _col1, _col3 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToInteger(_col0) (type: int), UDFToInteger((_col1 + _col3)) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4070,23 +4085,28 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: key, val - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string), val (type: string) - mode: hash - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string), _col1 (type: string) - sort order: ++ - Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - tag: -1 - value expressions: _col2 (type: bigint) + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), val (type: string) + outputColumnNames: key, val + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(1) + keys: key (type: string), val (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: 
string) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4147,11 +4167,11 @@ keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -4177,36 +4197,42 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 0 - value expressions: _col0 (type: string), _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: bigint) + auto parallelism: false TableScan GatherStats: false Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE tag: 1 - value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + value expressions: _col1 (type: string), _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4287,20 +4313,20 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} + 0 {KEY.reducesinkkey0} {VALUE._col0} + 1 {KEY.reducesinkkey0} {VALUE._col0} 
{VALUE._col1} outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: string), _col3 (type: string), _col4 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4410,6 +4436,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -6435,11 +6462,11 @@ predicate: (key = 8) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: _col0, _col1 + expressions: val (type: string) + outputColumnNames: _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string) + expressions: '8' (type: string) outputColumnNames: _col0 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -6456,7 +6483,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator - expressions: _col0 (type: string), _col1 (type: string) + expressions: '8' (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/udf_if.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_if.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_if.q.out (working copy) @@ -39,7 +39,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: if(true, 1, 2) (type: int), if(false, UDFToString(null), UDFToString(1)) (type: string), if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4)) (type: int), if((2 = 2), 1, null) (type: int), if((2 = 2), null, 1) (type: int), if(if(true, null, false), 1, 2) (type: int) + expressions: 1 (type: int), if(false, UDFToString(null), '1') (type: string), 1 (type: int), if(true, 1, null) (type: int), if(true, null, 1) (type: int), if(if(true, null, false), 1, 2) (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -94,7 +94,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: if(true, UDFToShort(128), UDFToByte(1)) (type: smallint), if(false, 1, 1.1) (type: double), if(false, 1, 'ABC') (type: string), if(false, 'ABC', 12.3) (type: string) + 
expressions: 128 (type: smallint), 1.1 (type: double), 'ABC' (type: string), '12.3' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf_format_number.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_format_number.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_format_number.q.out (working copy) @@ -42,7 +42,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: format_number(12332.123456, 4) (type: string), format_number(12332.1, 4) (type: string), format_number(12332.2, 0) (type: string) + expressions: '12,332.1235' (type: string), '12,332.1000' (type: string), '12,332' (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf_reflect2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_reflect2.q.out (working copy) @@ -320,7 +320,7 @@ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator - expressions: UDFToInteger(key) (type: int), value (type: string), CAST( '2013-02-15 19:41:20' AS TIMESTAMP) (type: timestamp) + expressions: UDFToInteger(key) (type: int), value (type: string), 2013-02-15 19:41:20.0 (type: timestamp) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator Index: ql/src/test/results/clientpositive/cross_product_check_2.q.out =================================================================== --- ql/src/test/results/clientpositive/cross_product_check_2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/cross_product_check_2.q.out (working copy) @@ -244,23 +244,20 @@ 1 key (type: string) outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -384,23 +381,20 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + 
outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -700,23 +694,20 @@ 1 key (type: string) outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: Index: ql/src/test/results/clientpositive/udf_rpad.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_rpad.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_rpad.q.out (working copy) @@ -38,7 +38,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: rpad('hi', 1, '?') (type: string), rpad('hi', 5, '.') (type: string), rpad('hi', 6, '123') (type: string) + expressions: 'h' (type: string), 'hi...' 
(type: string), 'hi1231' (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/combine2_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2_hadoop20.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out (working copy) @@ -208,6 +208,7 @@ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE tag: -1 value expressions: _col0 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: Index: ql/src/test/results/clientpositive/regex_col.q.out =================================================================== --- ql/src/test/results/clientpositive/regex_col.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/regex_col.q.out (working copy) @@ -179,26 +179,26 @@ Map Operator Tree: TableScan alias: b - Statistics: Num rows: 232 Data size: 23248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7748 Data size: 23248 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key = 103) and key is not null) (type: boolean) - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string), hr (type: string), ds (type: string) + key expressions: '103' (type: string), hr (type: string), ds (type: string) sort order: +++ - Map-reduce partition columns: key (type: string), hr (type: string), ds (type: string) - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Map-reduce partition columns: hr (type: string), ds (type: string) + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE TableScan alias: a - Statistics: Num rows: 232 Data size: 23248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7748 Data size: 23248 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key = 103) and key is not null) (type: boolean) - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string), hr (type: string), ds (type: string) + key expressions: '103' (type: string), hr (type: string), ds (type: string) sort order: +++ - Map-reduce partition columns: key (type: string), hr (type: string), ds (type: string) - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Map-reduce partition columns: hr (type: string), ds (type: string) + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -207,11 +207,11 @@ 0 1 {KEY.reducesinkkey2} {KEY.reducesinkkey1} outputColumnNames: _col8, _col9 - Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2130 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col8 (type: string), _col9 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2130 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output 
Operator compressed: false table: @@ -226,15 +226,15 @@ Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ - Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2130 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2130 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 63 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2130 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/udf_ascii.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_ascii.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_ascii.q.out (working copy) @@ -38,7 +38,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: ascii('Facebook') (type: int), ascii('') (type: int), ascii('!') (type: int) + expressions: 70 (type: int), 0 (type: int), 33 (type: int) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf6.q.out =================================================================== --- ql/src/test/results/clientpositive/udf6.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf6.q.out (working copy) @@ -32,7 +32,7 @@ alias: dest1 Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: if(true, 1, 2) (type: int) + expressions: 1 (type: int) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -86,7 +86,7 @@ alias: dest1 Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: if(true, 1, 2) (type: int), if(false, 1, 2) (type: int), if(null, 1, 2) (type: int), if(true, 'a', 'b') (type: string), if(true, 0.1, 0.2) (type: double), if(false, UDFToLong(1), UDFToLong(2)) (type: bigint), if(false, UDFToByte(127), UDFToByte(126)) (type: tinyint), if(false, UDFToShort(127), UDFToShort(128)) (type: smallint), 128 (type: int), 1.0 (type: double), '128' (type: string) + expressions: 1 (type: int), 2 (type: int), if(null, 1, 2) (type: int), 'a' (type: string), 0.1 (type: double), 2 (type: bigint), 126 (type: tinyint), 128 (type: smallint), 128 (type: int), 1.0 (type: double), '128' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/udf_hash.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_hash.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_hash.q.out (working copy) @@ -37,7 +37,7 @@ Row Limit Per Split: 1 Statistics: 
Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: hash(UDFToByte(1)) (type: int), hash(UDFToShort(2)) (type: int), hash(3) (type: int), hash(UDFToLong('123456789012')) (type: int), hash(UDFToFloat(1.25)) (type: int), hash(16.0) (type: int), hash('400') (type: int), hash('abc') (type: int), hash(true) (type: int), hash(false) (type: int), hash(1,2,3) (type: int) + expressions: 1 (type: int), 2 (type: int), 3 (type: int), -1097262584 (type: int), 1067450368 (type: int), 1076887552 (type: int), 51508 (type: int), 96354 (type: int), 1 (type: int), 0 (type: int), 1026 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/ppd_constant_where.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_constant_where.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_constant_where.q.out (working copy) @@ -17,20 +17,17 @@ TableScan alias: srcpart Statistics: Num rows: 0 Data size: 11624 Basic stats: PARTIAL Column stats: COMPLETE - Filter Operator - predicate: ('a' = 'a') (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Select Operator - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Group By Operator - aggregations: count() - mode: hash - outputColumnNames: _col0 + Select Operator + Statistics: Num rows: 0 Data size: 11624 Basic stats: PARTIAL Column stats: COMPLETE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint) + value expressions: _col0 (type: bigint) Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -47,17 +47,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: ('a' like '%a%') (type: boolean), ('b' like '%a%') (type: boolean), ('ab' like '%a%') (type: boolean), ('ab' like '%a_') (type: boolean), ('%_' like '\%\_') (type: boolean), ('ab' like '\%\_') (type: boolean), ('ab' like '_a%') (type: boolean), ('ab' like 'a') (type: boolean), ('' rlike '.*') (type: boolean), ('a' rlike '[ab]') (type: boolean), ('' rlike '[ab]') (type: boolean), ('hadoop' rlike '[a-z]*') (type: boolean), ('hadoop' rlike 'o*') (type: boolean), regexp_replace('abc', 'b', 'c') (type: string), regexp_replace('abc', 'z', 'a') (type: string), 
regexp_replace('abbbb', 'bb', 'b') (type: string), regexp_replace('hadoop', '(.)[a-z]*', '$1ive') (type: string), regexp_replace('hadoopAAA', 'A.*', '') (type: string), regexp_replace('abc', '', 'A') (type: string), ('abc' rlike '') (type: boolean) + expressions: true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), 'acc' (type: string), 'abc' (type: string), 'abb' (type: string), 'hive' (type: string), 'hadoop' (type: string), 'AaAbAcA' (type: string), false (type: boolean) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/input_part4.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part4.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input_part4.q.out (working copy) @@ -19,7 +19,7 @@ predicate: ((ds = '2008-04-08') and (hr = 15)) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: key (type: string), value (type: string), '2008-04-08' (type: string), '15' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE ListSink Index: ql/src/test/results/clientpositive/mapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/mapjoin1.q.out (working copy) @@ -343,16 +343,13 @@ TableScan alias: a Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: true (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - HashTable Sink Operator - condition expressions: - 0 {value} - 1 {key} {value} - keys: - 0 key (type: string) - 1 key (type: string) + HashTable Sink Operator + condition expressions: + 0 {value} + 1 {key} {value} + keys: + 0 key (type: string) + 1 key (type: string) Stage: Stage-1 Map Reduce @@ -360,34 +357,31 @@ TableScan alias: b Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: true (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - Map Join Operator - condition map: - Right Outer Join0 to 1 - condition expressions: - 0 {key} {value} - 1 {key} {value} - keys: - 0 key (type: string) - 1 key (type: string) - outputColumnNames: _col0, _col1, _col4, _col5 + Map Join Operator + condition map: + Right Outer Join0 to 1 + 
condition expressions: + 0 {key} {value} + 1 {key} {value} + keys: + 0 key (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col4, _col5 + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 10 + Limit + Number of rows: 10 + Statistics: Num rows: 10 Data size: 2060 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false Statistics: Num rows: 10 Data size: 2060 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 10 Data size: 2060 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Local Work: Map Reduce Local Work Index: ql/src/test/results/clientpositive/union20.q.out =================================================================== --- ql/src/test/results/clientpositive/union20.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/union20.q.out (working copy) @@ -37,20 +37,17 @@ TableScan alias: s3 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE - Filter Operator - predicate: 'tst1' is not null (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Select Operator - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Group By Operator - aggregations: count(1) - mode: hash - outputColumnNames: _col0 + Select Operator + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + Group By Operator + aggregations: count(1) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint) + value expressions: _col0 (type: bigint) Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -152,20 +149,17 @@ TableScan alias: s1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE - Filter Operator - predicate: 'tst1' is not null (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Select Operator - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE - Group By Operator - aggregations: count(1) - mode: hash - outputColumnNames: _col0 + Select Operator + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + Group By Operator + 
aggregations: count(1) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint) + value expressions: _col0 (type: bigint) Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) Index: ql/src/test/results/clientpositive/column_access_stats.q.out =================================================================== --- ql/src/test/results/clientpositive/column_access_stats.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/column_access_stats.q.out (working copy) @@ -531,7 +531,6 @@ sort order: + Map-reduce partition columns: key (type: string) Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE - value expressions: val (type: string) TableScan alias: t1 Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE @@ -543,18 +542,17 @@ sort order: + Map-reduce partition columns: key (type: string) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - value expressions: val (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} {VALUE._col0} - outputColumnNames: _col0, _col1, _col4, _col5 + 0 {KEY.reducesinkkey0} + 1 {KEY.reducesinkkey0} + outputColumnNames: _col0, _col4 Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string) + expressions: _col0 (type: string), '3' (type: string), _col4 (type: string), '3' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_when.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_when.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_when.q.out (working copy) @@ -77,7 +77,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END (type: int), CASE WHEN ((6 = 7)) THEN (8) ELSE (9) END (type: int), CASE WHEN ((10 = 11)) THEN (12) WHEN ((13 = 13)) THEN (14) END (type: int), CASE WHEN ((15 = 16)) THEN (17) WHEN ((18 = 19)) THEN (20) END (type: int), CASE WHEN ((21 = 22)) THEN (null) WHEN ((23 = 23)) THEN (24) END (type: int), CASE WHEN ((25 = 26)) THEN (27) WHEN ((28 = 28)) THEN (null) END (type: int) + expressions: 2 (type: int), 9 (type: int), 14 (type: int), null (type: void), CASE WHEN (false) THEN (null) WHEN (true) THEN (24) END (type: int), CASE WHEN (false) THEN (27) WHEN (true) THEN (null) END (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/smb_mapjoin_25.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_25.q.out (revision 1613528) +++ 
ql/src/test/results/clientpositive/smb_mapjoin_25.q.out (working copy) @@ -66,9 +66,8 @@ predicate: (key is not null and (key = 5)) (type: boolean) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: int) + key expressions: 5 (type: int) sort order: + - Map-reduce partition columns: key (type: int) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE TableScan alias: a @@ -77,21 +76,19 @@ predicate: (key is not null and (key = 5)) (type: boolean) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: int) + key expressions: 5 (type: int) sort order: + - Map-reduce partition columns: key (type: int) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} + 0 1 - outputColumnNames: _col0 Statistics: Num rows: 14 Data size: 57 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int) + expressions: 5 (type: int) outputColumnNames: _col0 Statistics: Num rows: 14 Data size: 57 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -129,7 +126,7 @@ predicate: (_col1 = 5) (type: boolean) Statistics: Num rows: 8 Data size: 33 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: int) + expressions: _col0 (type: int), 5 (type: int) outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 33 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -150,9 +147,8 @@ predicate: (key is not null and (key = 5)) (type: boolean) Statistics: Num rows: 14 Data size: 56 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: int) + key expressions: 5 (type: int) sort order: + - Map-reduce partition columns: key (type: int) Statistics: Num rows: 14 Data size: 56 Basic stats: COMPLETE Column stats: NONE TableScan alias: c @@ -161,21 +157,19 @@ predicate: (key is not null and (key = 5)) (type: boolean) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: int) + key expressions: 5 (type: int) sort order: + - Map-reduce partition columns: key (type: int) Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} + 0 1 - outputColumnNames: _col0 Statistics: Num rows: 15 Data size: 61 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int) + expressions: 5 (type: int) outputColumnNames: _col0 Statistics: Num rows: 15 Data size: 61 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_6.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_6.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_6.q.out (working copy) @@ -982,7 +982,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' 
(type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/index_stale_partitioned.q.out =================================================================== --- ql/src/test/results/clientpositive/index_stale_partitioned.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_stale_partitioned.q.out (working copy) @@ -92,7 +92,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string), foo (type: string) + expressions: '86' (type: string), val (type: string), foo (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_14.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_14.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_14.q.out (working copy) @@ -291,7 +291,7 @@ predicate: (key = '484') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '484' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/union_remove_19.q.out =================================================================== --- ql/src/test/results/clientpositive/union_remove_19.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/union_remove_19.q.out (working copy) @@ -265,25 +265,25 @@ Map Operator Tree: TableScan alias: inputtbl1 - Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 30 Data size: 30 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 7) (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '7' (type: string) outputColumnNames: key - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -291,14 +291,14 @@ keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE 
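Note on the hunks above (list_bucket_dml_6, index_stale_partitioned, list_bucket_dml_14, union_remove_19): once a filter pins a column to a literal, the literal is substituted into the downstream Select expressions. A minimal HiveQL sketch of that pattern follows; the table name t and its columns are illustrative placeholders, not names taken from this patch, and the expected plan text simply restates what the updated golden output already records.

    -- illustrative sketch; assumes a table t(key STRING, value STRING) shaped like the test tables
    EXPLAIN
    SELECT key, value FROM t WHERE key = '484';
    -- with the equality folded, the Select Operator is expected to read
    --   expressions: '484' (type: string), value (type: string)
    -- instead of projecting the key column itself
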
Select Operator expressions: _col0 (type: string), _col1 (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -320,25 +320,25 @@ Map Operator Tree: TableScan alias: inputtbl1 - Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 30 Data size: 30 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 7) (type: boolean) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '7' (type: string) outputColumnNames: key - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 15 Data size: 15 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -346,14 +346,14 @@ keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Statistics: Num rows: 7 Data size: 7 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/select_unquote_not.q.out =================================================================== --- ql/src/test/results/clientpositive/select_unquote_not.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/select_unquote_not.q.out (working copy) @@ -58,7 +58,7 @@ alias: npe_test Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (not (ds < ((2012 - 11) - 31))) (type: boolean) + predicate: (not (ds < 1970)) (type: boolean) Statistics: Num rows: 332 Data size: 3526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string), ds (type: string) Index: ql/src/test/results/clientpositive/udf_lower.q.out 
=================================================================== --- ql/src/test/results/clientpositive/udf_lower.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_lower.q.out (working copy) @@ -28,17 +28,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: lower('AbC 123') (type: string), upper('AbC 123') (type: string) + expressions: 'abc 123' (type: string), 'ABC 123' (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/list_bucket_dml_1.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_1.q.out (working copy) @@ -448,7 +448,7 @@ predicate: (key = '484') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '484' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (working copy) @@ -14,24 +14,24 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1), count(DISTINCT value), count(DISTINCT substr(value, 5)) keys: value (type: string), substr(value, 5) (type: string) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: 
string) sort order: ++ - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Reduce Operator Tree: Group By Operator @@ -82,18 +82,18 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string), substr(value, 5) (type: string) sort order: ++ - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: count(1), count(DISTINCT KEY._col0:0._col0), count(DISTINCT KEY._col0:1._col0) Index: ql/src/test/results/clientpositive/udf_between.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_between.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_between.q.out (working copy) @@ -24,7 +24,7 @@ alias: src Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50) (type: boolean) + predicate: (key + 100) BETWEEN 100 AND 200 (type: boolean) Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) @@ -79,7 +79,7 @@ alias: src Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key + 100) NOT BETWEEN (150 + (- 50)) AND (150 + 50) (type: boolean) + predicate: (key + 100) NOT BETWEEN 100 AND 200 (type: boolean) Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) @@ -133,17 +133,14 @@ TableScan alias: src Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: 'b' BETWEEN 'a' AND 'c' (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 1 - Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE - ListSink + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE + ListSink PREHOOK: query: SELECT * FROM src where 'b' between 'a' AND 'c' LIMIT 1 PREHOOK: type: QUERY @@ -169,17 +166,14 @@ TableScan alias: src Statistics: Num 
rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: 2 BETWEEN 2 AND '3' (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: key (type: string), value (type: string) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 1 - Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE - ListSink + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE + ListSink PREHOOK: query: SELECT * FROM src where 2 between 2 AND '3' LIMIT 1 PREHOOK: type: QUERY Index: ql/src/test/results/clientpositive/subquery_in.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_in.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_in.q.out (working copy) @@ -157,20 +157,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -269,20 +266,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 
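The udf_between and subquery_in hunks above cover the always-true predicate case. The sketch below reuses the query text already present in this test output; the comment paraphrases the plan change recorded in the diff rather than any new output.

    EXPLAIN SELECT * FROM src WHERE 'b' BETWEEN 'a' AND 'c' LIMIT 1;
    -- 'b' BETWEEN 'a' AND 'c' evaluates to constant true, so the updated plan drops
    -- the Filter Operator and the TableScan feeds the Select Operator directly;
    -- the synthetic (1 = 1) filters from rewritten IN subqueries are removed
    -- for the same reason.
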
Fetch Operator @@ -447,20 +441,17 @@ 1 outputColumnNames: _col1, _col5 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col1 (type: string), _col5 (type: int) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -618,20 +609,17 @@ 1 outputColumnNames: _col1, _col2, _col5 Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -763,20 +751,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -947,24 +932,24 @@ value expressions: _col0 (type: int), _col3 (type: int) TableScan alias: lineitem - Statistics: Num rows: 116 Data size: 12099 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1728 Data size: 12099 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((l_shipmode = 'AIR') and l_orderkey is not null) (type: boolean) - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: l_orderkey (type: int) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out (working copy) @@ -6945,11 +6945,11 @@ predicate: (key = 8) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: _col0, _col1 + expressions: val (type: string) + outputColumnNames: _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string) + expressions: '8' (type: string) outputColumnNames: _col0 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -6966,7 +6966,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator - expressions: _col0 (type: string), _col1 (type: string) + expressions: '8' (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/udf_space.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_space.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_space.q.out (working copy) @@ -40,7 +40,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: space(10) (type: string), space(0) (type: string), space(1) (type: string), space((- 1)) (type: string), space((- 100)) (type: string) + expressions: ' ' (type: string), '' (type: string), ' ' (type: string), '' (type: string), '' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: 
COMPLETE ListSink Index: ql/src/test/results/clientpositive/subquery_in_having.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_in_having.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_in_having.q.out (working copy) @@ -136,20 +136,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-3 Map Reduce @@ -343,20 +340,17 @@ 1 outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-3 Map Reduce @@ -510,20 +504,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: double) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: double) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-3 Map Reduce @@ -677,20 +668,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: double) + outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: double) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Group By Operator aggregations: max(VALUE._col0), min(VALUE._col1) keys: KEY._col0 (type: string) @@ -714,20 +702,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: double) + outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: double) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -841,25 +826,22 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count() - keys: _col0 (type: string), _col1 (type: string) - mode: hash 
- outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Stage: Stage-2 Map Reduce @@ -913,20 +895,17 @@ 1 outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-5 Map Reduce @@ -1109,17 +1088,15 @@ 0 _col2 (type: bigint) 1 _col0 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: (1 = 1) (type: boolean) - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Local Work: Map Reduce Local Work @@ -1148,20 +1125,17 @@ 1 outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - File 
Output Operator - compressed: false - Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-10 Map Reduce Local Work @@ -1214,25 +1188,22 @@ 1 _col0 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + Group By Operator + aggregations: count() + keys: _col0 (type: string), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count() - keys: _col0 (type: string), _col1 (type: string) - mode: hash - outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string), _col1 (type: string) - sort order: ++ - Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: bigint) + value expressions: _col2 (type: bigint) Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -1354,17 +1325,15 @@ 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: (1 = 1) (type: boolean) - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double) - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double) + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Local Work: Map Reduce Local Work @@ -1393,20 +1362,17 @@ 1 outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: 
string), _col1 (type: string), _col2 (type: double) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-3 Map Reduce Index: ql/src/test/results/clientpositive/udf_lpad.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_lpad.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_lpad.q.out (working copy) @@ -38,7 +38,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: lpad('hi', 1, '?') (type: string), lpad('hi', 5, '.') (type: string), lpad('hi', 6, '123') (type: string) + expressions: 'h' (type: string), '...hi' (type: string), '1231hi' (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/ppd_join4.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join4.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_join4.q.out (working copy) @@ -58,18 +58,13 @@ predicate: ((id is not null and (name = 'c')) and (id = 'a')) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: id (type: string), name (type: string) - outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: 'a' (type: string) sort order: + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) - outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator compressed: false @@ -88,28 +83,24 @@ predicate: (id is not null and (id = 'a')) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: string) + key expressions: 'a' (type: string) sort order: + - Map-reduce partition columns: id (type: string) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE TableScan Reduce Output Operator - key expressions: _col0 (type: string) + key expressions: 'a' (type: string) sort order: + - Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} + 0 1 - outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: 
NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string) + expressions: 'a' (type: string), 'c' (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) @@ -36,7 +36,7 @@ alias: src Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int) + expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Transform Operator Index: ql/src/test/results/clientpositive/select_dummy_source.q.out =================================================================== --- ql/src/test/results/clientpositive/select_dummy_source.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/select_dummy_source.q.out (working copy) @@ -64,7 +64,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (1 + 1) (type: int) + expressions: 2 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE File Output Operator @@ -193,7 +193,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (1 + 1) (type: int) + expressions: 2 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -277,7 +277,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (2 + 3) (type: int), (1 + 2) (type: int) + expressions: 5 (type: int), (1 + 2) (type: int) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/tez/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/transform_ppr1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/transform_ppr1.q.out (working copy) @@ -135,7 +135,7 @@ Map-reduce partition columns: _col1 (type: string) Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE tag: -1 - value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) + value expressions: '2008-04-08' (type: string), _col1 (type: string), _col2 (type: string) auto parallelism: true Path -> Alias: #### A masked pattern was here #### Index: ql/src/test/results/clientpositive/tez/subquery_in.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/subquery_in.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/subquery_in.q.out (working copy) @@ -165,20 +165,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: 
string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -285,20 +282,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -451,20 +445,17 @@ 1 outputColumnNames: _col1, _col5 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col1 (type: string), _col5 (type: int) + outputColumnNames: _col0, _col1 Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -560,20 +551,17 @@ 1 outputColumnNames: _col1, _col2, _col5 Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col2 (type: string), _col1 
(type: string), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Reducer 4 Reduce Operator Tree: Extract @@ -732,20 +720,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Reducer 4 Reduce Operator Tree: Group By Operator @@ -894,24 +879,24 @@ Map Operator Tree: TableScan alias: lineitem - Statistics: Num rows: 116 Data size: 12099 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1728 Data size: 12099 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((l_shipmode = 'AIR') and l_orderkey is not null) (type: boolean) - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: l_orderkey (type: int) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator Index: 
ql/src/test/results/clientpositive/tez/insert1.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/insert1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/insert1.q.out (working copy) @@ -47,10 +47,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -99,10 +99,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -164,10 +164,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -216,10 +216,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out (working copy) @@ -245,23 +245,20 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: 
NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator @@ -360,23 +357,20 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator @@ -498,23 +492,20 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator Index: ql/src/test/results/clientpositive/tez/subquery_exists.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/subquery_exists.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/subquery_exists.q.out (working copy) @@ -74,20 +74,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: 
COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out =================================================================== --- ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out (working copy) @@ -208,23 +208,20 @@ 1 key (type: string) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan @@ -322,23 +319,20 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan @@ -437,23 +431,20 @@ 1 key (type: string) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan Index: ql/src/test/results/clientpositive/subquery_notexists.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_notexists.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_notexists.q.out (working copy) @@ -59,7 +59,7 @@ outputColumnNames: _col0, _col1, _col6 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col6 is null) (type: boolean) + predicate: _col6 is null (type: boolean) Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) @@ -312,7 +312,7 @@ outputColumnNames: _col0, _col1, _col5 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col5 is null) (type: boolean) + predicate: _col5 is null (type: boolean) Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) Index: ql/src/test/results/clientpositive/quotedid_partition.q.out =================================================================== --- ql/src/test/results/clientpositive/quotedid_partition.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/quotedid_partition.q.out (working copy) @@ -45,7 +45,7 @@ predicate: (x+1 = '10') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string) + expressions: '10' (type: string), y&y (type: string), !@#$%^&*()_q (type: string) outputColumnNames: x+1, y&y, !@#$%^&*()_q Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/udf_degrees.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_degrees.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_degrees.q.out (working copy) @@ -17,7 +17,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: degrees(pi()) (type: double) + expressions: 180.0 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -64,7 +64,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: degrees(pi()) (type: double) + expressions: 180.0 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/ppd_udf_col.q.out 
=================================================================== --- ql/src/test/results/clientpositive/ppd_udf_col.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_udf_col.q.out (working copy) @@ -18,24 +18,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double) - outputColumnNames: _col0, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double) + outputColumnNames: _col2 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col2 <= 0.1) (type: boolean) - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double) + expressions: '100' (type: string), _col2 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -71,34 +71,34 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double) - outputColumnNames: _col0, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double) + outputColumnNames: _col2 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col2 <= 0.1) (type: boolean) - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + expressions: _col2 (type: double) + outputColumnNames: _col1 + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col1 > 0.1) (type: boolean) - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 107 Data size: 321 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: 
double) + expressions: '100' (type: string), _col1 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 107 Data size: 321 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 20 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 60 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 60 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -130,24 +130,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double), hex(4) (type: string) - outputColumnNames: _col0, _col2, _col3 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double), '4' (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col3 <= 3) (type: boolean) - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double), _col3 (type: string) + expressions: '100' (type: string), _col2 (type: double), _col3 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -179,24 +179,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double), (value * 10) (type: double) - outputColumnNames: _col0, _col2, _col3 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double), (value * 10) (type: double) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col3 <= 200.0) (type: boolean) - Statistics: Num rows: 4 Data size: 801 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double), _col3 (type: double) + expressions: '100' (type: string), _col2 (type: double), _col3 (type: double) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -228,24 +228,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double) - outputColumnNames: _col0, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double) + outputColumnNames: _col2 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col2 <= 0.1) (type: boolean) - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double) + expressions: '100' (type: string), _col2 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -281,27 +281,27 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double) - outputColumnNames: _col0, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double) + outputColumnNames: _col2 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((_col2 <= 0.1) and (_col2 > 0.1)) (type: boolean) - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 107 Data size: 321 Basic stats: COMPLETE 
Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double) + expressions: '100' (type: string), _col2 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 107 Data size: 321 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 20 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 60 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 60 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -333,24 +333,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1937 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double), hex(4) (type: string) - outputColumnNames: _col0, _col2, _col3 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double), '4' (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 968 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col3 <= 3) (type: boolean) - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double), _col3 (type: string) + expressions: '100' (type: string), _col2 (type: double), _col3 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 322 Data size: 966 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -382,24 +382,24 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 100) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), rand() (type: double), (value * 10) (type: double) - outputColumnNames: _col0, _col2, _col3 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + expressions: rand() (type: double), (value * 10) (type: double) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Filter Operator 
predicate: (_col3 <= 200.0) (type: boolean) - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col2 (type: double), _col3 (type: double) + expressions: '100' (type: string), _col2 (type: double), _col3 (type: double) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 934 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/multi_insert.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/multi_insert.q.out (working copy) @@ -2501,45 +2501,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: 
NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2595,45 +2595,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column 
stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2689,45 +2689,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2783,45 +2783,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/union27.q.out =================================================================== --- ql/src/test/results/clientpositive/union27.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/union27.q.out (working copy) @@ -19,6 +19,89 @@ POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: default@jackson_sev_add +PREHOOK: query: explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97 +PREHOOK: type: QUERY +POSTHOOK: query: explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + 
TableScan + alias: a + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key = 97) and key is not null) (type: boolean) + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: '97' (type: string) + sort order: + + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + TableScan + alias: jackson_sev_add + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key = 97) and key is not null) (type: boolean) + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: '97' (type: string) + sort order: + + Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + TableScan + alias: dim_pho + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key = 97) and key is not null) (type: boolean) + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: '97' (type: string) + sort order: + + Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 {VALUE._col0} + outputColumnNames: _col5 + Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: '97' (type: string), _col5 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97 PREHOOK: type: QUERY PREHOOK: Input: default@dim_pho Index: ql/src/test/results/clientpositive/ppd2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd2.q.out (working copy) @@ -348,40 +348,38 @@ Map Operator Tree: TableScan alias: y - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num 
rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and (key = 20)) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: key (type: string) + key expressions: '20' (type: string) sort order: + - Map-reduce partition columns: key (type: string) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} - 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col4 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + 0 {VALUE._col0} + 1 + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string) - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + expressions: _col1 (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -397,16 +395,15 @@ key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: string), _col2 (type: string) + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string) + expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy) @@ -70,8 +70,8 
@@ predicate: (((_col2 = '2008-04-08') and (_col8 = '2008-04-08')) and CASE (_col0) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END) (type: boolean) Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col8 (type: string), _col9 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + expressions: _col0 (type: string), _col1 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col9 (type: string) + outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col7 Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -85,12 +85,12 @@ Map Operator Tree: TableScan Reduce Output Operator - key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string), _col7 (type: string) + key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), '2008-04-08' (type: string), _col7 (type: string) sort order: ++++++++ Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator - expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string), KEY.reducesinkkey6 (type: string), KEY.reducesinkkey7 (type: string) + expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey7 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/index_auto_empty.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_empty.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_auto_empty.q.out (working copy) @@ -59,7 +59,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) + expressions: '86' (type: string), val (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_nvl.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_nvl.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_nvl.q.out (working copy) @@ -34,7 +34,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: if 1 is null returns2 (type: int), if null is null returns5 (type: int) + expressions: 1 (type: int), if null is null returns5 (type: int) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column 
stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/smb_mapjoin_18.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (working copy) @@ -208,12 +208,14 @@ 253 PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table -- This should be a map-only operation, one of the buckets should be empty + EXPLAIN INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238 PREHOOK: type: QUERY POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table -- This should be a map-only operation, one of the buckets should be empty + EXPLAIN INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238 Index: ql/src/test/results/clientpositive/input_part6.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part6.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input_part6.q.out (working copy) @@ -16,12 +16,12 @@ alias: x Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (ds = ((2008 - 4) - 8)) (type: boolean) - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + predicate: (ds = 1996) (type: boolean) + Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: key (type: string), value (type: string), '1996' (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 Statistics: Num rows: 10 Data size: 2000 Basic stats: COMPLETE Column stats: NONE Index: ql/src/test/results/clientpositive/literal_double.q.out =================================================================== --- ql/src/test/results/clientpositive/literal_double.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/literal_double.q.out (working copy) @@ -14,7 +14,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: 3.14 (type: double), (- 3.14) (type: double), 3.14E8 (type: double), 3.14E-8 (type: double), (- 3.14E8) (type: double), (- 3.14E-8) (type: double), 3.14E8 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double) + expressions: 3.14 (type: double), -3.14 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double), -3.14E8 (type: double), -3.14E-8 (type: double), 3.14E8 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/udf_second.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_second.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_second.q.out (working copy) @@ -32,14 +32,14 @@ Processor Tree: TableScan alias: src - 
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: second('2009-08-07 13:14:15') (type: int), second('13:14:15') (type: int), second('2009-08-07') (type: int) + expressions: 15 (type: int), 15 (type: int), null (type: void) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: SELECT second('2009-08-07 13:14:15'), second('13:14:15'), second('2009-08-07') Index: ql/src/test/results/clientpositive/index_auto_multiple.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_multiple.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_auto_multiple.q.out (working copy) @@ -91,17 +91,17 @@ TableScan alias: src filterExpr: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '86' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/list_bucket_dml_8.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_8.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_8.q.out (working copy) @@ -581,7 +581,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_10_trims.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy) @@ -33,17 +33,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) (type: string) + expressions: 'abc' (type: string) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out =================================================================== --- ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out (working copy) @@ -2550,45 +2550,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator 
predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2648,45 +2648,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: 
NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2746,45 +2746,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2844,45 +2844,45 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data 
size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 2) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '2' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Filter Operator predicate: (key = 4) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '4' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (working copy) @@ -471,6 +471,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -562,6 +563,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: 
false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -1938,6 +1940,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col3 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2029,6 +2032,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col3 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2222,6 +2226,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2313,6 +2318,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2552,6 +2558,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -2643,6 +2650,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -3396,6 +3404,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -3487,6 +3496,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4080,52 +4090,62 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 0 - value expressions: _col0 (type: string), _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data 
size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: bigint) + auto parallelism: false TableScan alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 1 - value expressions: _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 1 + value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4185,20 +4205,20 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} - 1 {VALUE._col1} + 0 {KEY.reducesinkkey0} {VALUE._col0} + 1 {VALUE._col0} outputColumnNames: _col0, _col1, _col3 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToInteger(_col0) (type: int), UDFToInteger((_col1 + _col3)) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4391,23 +4411,28 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: key, val - Statistics: Num rows: 6 Data 
size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string), val (type: string) - mode: hash - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string), _col1 (type: string) - sort order: ++ - Map-reduce partition columns: rand() (type: double) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - tag: -1 - value expressions: _col2 (type: bigint) + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), val (type: string) + outputColumnNames: key, val + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(1) + keys: key (type: string), val (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: rand() (type: double) + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + tag: -1 + value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4468,7 +4493,7 @@ keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: partials outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -4496,9 +4521,10 @@ key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4531,11 +4557,11 @@ keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: final outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -4561,36 +4587,42 @@ alias: t1 Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE GatherStats: false - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: count(1) - keys: key (type: string) - mode: final - outputColumnNames: _col0, _col1 + Filter Operator + isSamplingPred: false + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 12 Basic stats: 
COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) + Group By Operator + aggregations: count(1) + keys: key (type: string) + mode: final outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE - tag: 0 - value expressions: _col0 (type: string), _col1 (type: bigint) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + tag: 0 + value expressions: _col1 (type: bigint) + auto parallelism: false TableScan GatherStats: false Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE tag: 1 - value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + value expressions: _col1 (type: string), _col2 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4671,20 +4703,20 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} + 0 {KEY.reducesinkkey0} {VALUE._col0} + 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: string), _col3 (type: string), _col4 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4795,6 +4827,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> Partition: @@ -4886,6 +4919,7 @@ Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) + auto parallelism: false Path -> Alias: #### A masked pattern was here #### Path -> 
Partition: @@ -6911,11 +6945,11 @@ predicate: (key = 8) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: _col0, _col1 + expressions: val (type: string) + outputColumnNames: _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string) + expressions: '8' (type: string) outputColumnNames: _col0 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -6932,7 +6966,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator - expressions: _col0 (type: string), _col1 (type: string) + expressions: '8' (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out =================================================================== --- ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out (working copy) @@ -323,7 +323,7 @@ Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 634 Basic stats: COMPLETE Column stats: NONE - value expressions: p_partkey (type: int), p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) + value expressions: p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) TableScan alias: p1 Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE @@ -337,8 +337,8 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -368,18 +368,18 @@ sort order: + Map-reduce partition columns: _col12 (type: string) Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 
(type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} 1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) + expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_parse_url.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_parse_url.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_parse_url.q.out (working copy) @@ -55,17 +55,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: 
COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') (type: string) + expressions: 'facebook.com' (type: string), '/path1/p.php' (type: string), 'k1=v1&k2=v2' (type: string), 'Ref1' (type: string), 'v2' (type: string), 'v1' (type: string), null (type: void), '/path1/p.php?k1=v1&k2=v2' (type: string), 'http' (type: string), null (type: void), 'facebook.com' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/list_bucket_dml_3.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_3.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_3.q.out (working copy) @@ -388,7 +388,7 @@ predicate: (key = '484') (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '484' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_11.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_11.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_11.q.out (working copy) @@ -318,7 +318,7 @@ predicate: (value = 'val_466') (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: key (type: string), 'val_466' (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2406 Basic stats: 
COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/lateral_view_ppd.q.out =================================================================== --- ql/src/test/results/clientpositive/lateral_view_ppd.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/lateral_view_ppd.q.out (working copy) @@ -12,26 +12,26 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '0') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -39,20 +39,20 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -95,26 +95,26 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data 
size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '0') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -122,23 +122,23 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE function name: explode Filter Operator predicate: (col = 1) (type: boolean) - Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 1427 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 4207 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 4282 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -266,35 +266,35 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '0') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Forward 
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4, _col5 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -302,20 +302,20 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col1, _col4, _col5 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -323,29 +323,29 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE 
UDTF Operator - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col1, _col4 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4, _col5 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -353,20 +353,20 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5710 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col1, _col4, _col5 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 56 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -429,26 +429,26 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '0') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - 
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -456,23 +456,23 @@ Select Operator expressions: array(1,2,3) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE function name: explode Filter Operator predicate: (col > 1) (type: boolean) - Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 917 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col1, _col4 - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col4 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 18 Data size: 3606 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 3772 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/insert1.q.out =================================================================== --- ql/src/test/results/clientpositive/insert1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/insert1.q.out (working copy) @@ -48,10 +48,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File 
Output Operator @@ -137,10 +137,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -239,10 +239,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -328,10 +328,10 @@ alias: a Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: (key = (- 1)) (type: boolean) + predicate: (key = -1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string) + expressions: -1 (type: int), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out (working copy) @@ -223,7 +223,7 @@ predicate: (x = 484) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x (type: int) + expressions: 484 (type: int) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -370,7 +370,7 @@ predicate: (x = 495) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x (type: int) + expressions: 495 (type: int) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -513,7 +513,7 @@ predicate: (x = 1) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: x (type: int) + expressions: 1 (type: int) outputColumnNames: _col0 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -330,7 +330,7 @@ predicate: ((((_col6 = _col0) and (_col7 = _col1)) and (_col2 = '2008-04-08')) and (_col3 = '11')) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) + expressions: _col0 (type: string), _col1 
(type: string), '2008-04-08' (type: string), '11' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out (working copy) @@ -171,7 +171,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '484' (type: string) outputColumnNames: _col0 Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -331,7 +331,7 @@ predicate: ((key = '238') and (value = 'val_238')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '238' (type: string), 'val_238' (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -638,7 +638,7 @@ predicate: (key = '495') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '495' (type: string), value (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/set_processor_namespaces.q.out =================================================================== --- ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out (working copy) @@ -17,17 +17,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 5) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '5' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/explain_logical.q.out =================================================================== --- ql/src/test/results/clientpositive/explain_logical.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/explain_logical.q.out (working copy) @@ -713,7 +713,7 @@ predicate: (ds = '10') (type: boolean) 
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator (SEL_2) - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: key (type: string), value (type: string), '10' (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator (FS_4) Index: ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out =================================================================== --- ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out (working copy) @@ -377,7 +377,7 @@ Reduce Output Operator sort order: Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - value expressions: p2_partkey (type: int), p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string) + value expressions: p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string) TableScan alias: p1 Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE @@ -391,8 +391,8 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -422,18 +422,18 @@ sort order: + Map-reduce partition columns: _col12 (type: string) Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} 
{VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} 1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) + expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/select_unquote_and.q.out =================================================================== --- ql/src/test/results/clientpositive/select_unquote_and.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/select_unquote_and.q.out (working copy) @@ -58,7 +58,7 @@ alias: npe_test Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((ds > ((2012 - 11) - 31)) and (ds < ((2012 - 12) - 15))) (type: boolean) + predicate: ((ds > 1970) and (ds < 1985)) (type: boolean) Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string), ds (type: string) Index: ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out (revision 1613528) +++ 
ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out (working copy) @@ -36,11 +36,11 @@ alias: src Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key > 100) (type: boolean) - Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE + predicate: ((key > 100) and key is not null) (type: boolean) + Statistics: Num rows: 10 Data size: 1002 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator condition expressions: - 0 {key} + 0 1 {value} keys: 0 key (type: string) @@ -53,8 +53,8 @@ alias: srcpart Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (key > 100) (type: boolean) - Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE + predicate: ((key > 100) and key is not null) (type: boolean) + Statistics: Num rows: 10 Data size: 2004 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -65,14 +65,14 @@ 0 key (type: string) 1 key (type: string) outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToInteger(_col0) (type: int), _col5 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/udf_printf.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_printf.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_printf.q.out (working copy) @@ -38,7 +38,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: printf('Hello World %d %s', 100, 'days') (type: string) + expressions: 'Hello World 100 days' (type: string) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf_E.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_E.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_E.q.out (working copy) @@ -17,7 +17,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: e() (type: double) + expressions: 2.718281828459045 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -64,7 +64,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: e() (type: double) + expressions: 2.718281828459045 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: 
ql/src/test/results/clientpositive/constprog2.q.out =================================================================== --- ql/src/test/results/clientpositive/constprog2.q.out (revision 0) +++ ql/src/test/results/clientpositive/constprog2.q.out (revision 0) @@ -0,0 +1,75 @@ +PREHOOK: query: EXPLAIN +SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src2 + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key = 86) and key is not null) (type: boolean) + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: '86' (type: string) + sort order: + + Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + TableScan + alias: src1 + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key = 86) and key is not null) (type: boolean) + Statistics: Num rows: 727 Data size: 1454 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: '86' (type: string) + sort order: + + Statistics: Num rows: 727 Data size: 1454 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 {VALUE._col0} + outputColumnNames: _col5 + Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: '86' (type: string), 87.0 (type: double), _col5 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +86 87.0 val_86 Index: ql/src/test/results/clientpositive/set_variable_sub.q.out =================================================================== --- ql/src/test/results/clientpositive/set_variable_sub.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/set_variable_sub.q.out (working copy) @@ -12,17 +12,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 54 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 
'value1') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: 'value1' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -48,17 +48,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 54 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'value1') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: 'value1' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,17 +84,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '1') (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '1' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/input38.q.out =================================================================== --- ql/src/test/results/clientpositive/input38.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input38.q.out (working copy) @@ -39,7 +39,7 @@ alias: src Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), 
value (type: string), (1 + 2) (type: int), (3 + 4) (type: int) + expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Transform Operator Index: ql/src/test/results/clientpositive/quotedid_basic.q.out =================================================================== --- ql/src/test/results/clientpositive/quotedid_basic.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/quotedid_basic.q.out (working copy) @@ -76,7 +76,7 @@ predicate: (!@#$%^&*()_q = '1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string) + expressions: x+1 (type: string), y&y (type: string), '1' (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -112,7 +112,7 @@ predicate: (!@#$%^&*()_q = '1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string) + expressions: x+1 (type: string), y&y (type: string), '1' (type: string) outputColumnNames: x+1, y&y, !@#$%^&*()_q Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Group By Operator @@ -171,7 +171,7 @@ predicate: (!@#$%^&*()_q = '1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string) + expressions: x+1 (type: string), y&y (type: string), '1' (type: string) outputColumnNames: x+1, y&y, !@#$%^&*()_q Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Group By Operator @@ -254,7 +254,7 @@ predicate: (!@#$%^&*()_q = '1') (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string) + expressions: x+1 (type: string), y&y (type: string), '1' (type: string) outputColumnNames: x+1, y&y, !@#$%^&*()_q Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out =================================================================== --- ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (working copy) @@ -95,17 +95,17 @@ TableScan alias: srcpart filterExpr: ((key = 86) and (ds = '2008-04-09')) (type: boolean) - Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '86' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output 
Operator compressed: false - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/cross_product_check_1.q.out =================================================================== --- ql/src/test/results/clientpositive/cross_product_check_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/cross_product_check_1.q.out (working copy) @@ -220,24 +220,21 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Stage: Stage-2 Map Reduce @@ -345,24 +342,21 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Stage: Stage-2 Map Reduce @@ -483,24 +477,21 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Select 
Operator - expressions: _col0 (type: string) + Group By Operator + keys: _col0 (type: string) + mode: hash outputColumnNames: _col0 Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - Group By Operator - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/udf_coalesce.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_coalesce.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_coalesce.q.out (working copy) @@ -66,7 +66,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: COALESCE(1) (type: int), COALESCE(1,2) (type: int), COALESCE(null,2) (type: int), COALESCE(1,null) (type: int), COALESCE(null,null,3) (type: int), COALESCE(4,null,null,null) (type: int), COALESCE('1') (type: string), COALESCE('1','2') (type: string), COALESCE(null,'2') (type: string), COALESCE('1',null) (type: string), COALESCE(null,null,'3') (type: string), COALESCE('4',null,null,null) (type: string), COALESCE(1.0) (type: double), COALESCE(1.0,2.0) (type: double), COALESCE(null,2.0) (type: double), COALESCE(null,2.0,3.0) (type: double), COALESCE(2.0,null,3.0) (type: double), COALESCE(if(true, null, 0),null) (type: int) + expressions: 1 (type: int), 1 (type: int), COALESCE(null,2) (type: int), COALESCE(1,null) (type: int), COALESCE(null,null,3) (type: int), COALESCE(4,null,null,null) (type: int), '1' (type: string), '1' (type: string), COALESCE(null,'2') (type: string), COALESCE('1',null) (type: string), COALESCE(null,null,'3') (type: string), COALESCE('4',null,null,null) (type: string), 1.0 (type: double), 1.0 (type: double), COALESCE(null,2.0) (type: double), COALESCE(null,2.0,3.0) (type: double), COALESCE(2.0,null,3.0) (type: double), COALESCE(if(true, null, 0),null) (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf_minute.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_minute.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_minute.q.out (working copy) @@ -32,17 +32,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - 
expressions: minute('2009-08-07 13:14:15') (type: int), minute('13:14:15') (type: int), minute('2009-08-07') (type: int) + expressions: 14 (type: int), 14 (type: int), null (type: void) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/udf5.q.out =================================================================== --- ql/src/test/results/clientpositive/udf5.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf5.q.out (working copy) @@ -32,7 +32,7 @@ alias: dest1 Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: from_unixtime(1226446340) (type: string), to_date(from_unixtime(1226446340)) (type: string), day('2008-11-01') (type: int), month('2008-11-01') (type: int), year('2008-11-01') (type: int), day('2008-11-01 15:32:20') (type: int), month('2008-11-01 15:32:20') (type: int), year('2008-11-01 15:32:20') (type: int) + expressions: '2008-11-11 15:32:20' (type: string), '2008-11-11' (type: string), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: Num rows: 1 Data size: 221 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/union33.q.out =================================================================== --- ql/src/test/results/clientpositive/union33.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/union33.q.out (working copy) @@ -111,23 +111,23 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -135,14 +135,14 @@ name: default.test_src TableScan Union - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -334,14 +334,14 @@ Map Operator Tree: TableScan Union - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -349,23 +349,23 @@ name: default.test_src TableScan alias: src - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string) + expressions: '0' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 2855 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 43 Data size: 5711 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 57 Data size: 5761 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out (working copy) @@ -340,7 +340,7 @@ Reduce Output Operator sort order: Statistics: Num rows: 1 Data size: 634 Basic stats: COMPLETE Column stats: NONE - value expressions: p_partkey (type: int), p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) + value expressions: p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) TableScan alias: p1 Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE @@ -354,8 +354,8 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -385,26 +385,26 @@ sort order: + Map-reduce partition columns: _col12 (type: string) Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} 1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, 
_col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((_col11 = 1) and (_col23 = _col12)) (type: boolean) - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + predicate: (_col23 = _col12) (type: boolean) + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) + expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26 - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/list_bucket_dml_5.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_5.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_5.q.out (working copy) @@ -461,7 +461,7 @@ predicate: ((key = '103') and (value = 'val_103')) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '103' (type: string), 'val_103' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/udf_PI.q.out 
=================================================================== --- ql/src/test/results/clientpositive/udf_PI.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_PI.q.out (working copy) @@ -17,7 +17,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: pi() (type: double) + expressions: 3.141592653589793 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -64,7 +64,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: pi() (type: double) + expressions: 3.141592653589793 (type: double) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/list_bucket_dml_13.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_13.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_13.q.out (working copy) @@ -331,7 +331,7 @@ predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean) Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 (type: string), col5 (type: string), ds (type: string), hr (type: string) + expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/union_view.q.out =================================================================== --- ql/src/test/results/clientpositive/union_view.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/union_view.q.out (working copy) @@ -76,7 +76,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) + expressions: 86 (type: int), value (type: string), ds (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -135,7 +135,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) + expressions: 86 (type: int), value (type: string), ds (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -194,7 +194,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) + expressions: 86 (type: int), value (type: string), ds (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -474,13 +474,13 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data 
size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + expressions: 86 (type: int), _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -539,13 +539,13 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + expressions: 86 (type: int), _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -604,13 +604,13 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + expressions: 86 (type: int), _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -673,20 +673,20 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: _col1 (type: string), _col2 (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: string) sort order: + Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string) + value expressions: _col1 (type: string) 
TableScan alias: src_union_1 filterExpr: ((key = 86) and ds is not null) (type: boolean) @@ -695,20 +695,20 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: _col1 (type: string), _col2 (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: string) sort order: + Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string) + value expressions: _col1 (type: string) TableScan alias: src_union_3 filterExpr: ((key = 86) and ds is not null) (type: boolean) @@ -717,23 +717,23 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: _col1 (type: string), _col2 (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: string) sort order: + Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string) + value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY.reducesinkkey0 (type: string) + expressions: 86 (type: int), VALUE._col1 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 1250 Data size: 13280 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -1078,13 +1078,13 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: int), value (type: string), ds (type: string) - outputColumnNames: _col0, _col1, _col2 + expressions: value (type: string), ds (type: string) + outputColumnNames: _col1, _col2 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Union Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) + expressions: 86 (type: int), _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 250 Data size: 
2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/groupby_ppd.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppd.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/groupby_ppd.q.out (working copy) @@ -26,13 +26,13 @@ predicate: (bar = 1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: bar (type: int), foo (type: int) - outputColumnNames: _col0, _col1 + expressions: foo (type: int) + outputColumnNames: _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Union Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col1 (type: int), _col0 (type: int) + expressions: _col1 (type: int), 1 (type: int) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Group By Operator @@ -52,13 +52,13 @@ predicate: (bar = 1) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: bar (type: int), foo (type: int) - outputColumnNames: _col0, _col1 + expressions: foo (type: int) + outputColumnNames: _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Union Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col1 (type: int), _col0 (type: int) + expressions: _col1 (type: int), 1 (type: int) outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/udf_current_database.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_current_database.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_current_database.q.out (working copy) @@ -22,7 +22,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: current_database() (type: string) + expressions: 'default' (type: string) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE File Output Operator @@ -75,7 +75,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: current_database() (type: string) + expressions: 'xxx' (type: string) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE File Output Operator @@ -124,7 +124,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: current_database() (type: string) + expressions: 'default' (type: string) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -161,7 +161,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: current_database() (type: string) + expressions: 'xxx' (type: string) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/input39_hadoop20.q.out =================================================================== --- 
ql/src/test/results/clientpositive/input39_hadoop20.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input39_hadoop20.q.out (working copy) @@ -73,24 +73,24 @@ alias: t2 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((hash(rand(460476415)) & 2147483647) % 32) = 0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + predicate: ((((hash(rand(460476415)) & 2147483647) % 32) = 0) and key is not null) (type: boolean) + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE TableScan alias: t1 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((hash(rand(460476415)) & 2147483647) % 32) = 0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + predicate: ((((hash(rand(460476415)) & 2147483647) % 32) = 0) and key is not null) (type: boolean) + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -98,9 +98,9 @@ condition expressions: 0 1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) mode: hash Index: ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out (working copy) @@ -287,7 +287,7 @@ predicate: (key = '145') (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '145' (type: string), value (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -600,7 +600,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ 
-754,7 +754,7 @@ predicate: ((key = '327') and (value = 'val_327')) (type: boolean) Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '327' (type: string), 'val_327' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out =================================================================== --- ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out (working copy) @@ -394,7 +394,7 @@ Reduce Output Operator sort order: Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - value expressions: p2_partkey (type: int), p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string) + value expressions: p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string) TableScan alias: p1 Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE @@ -408,8 +408,8 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -439,26 +439,26 @@ sort order: + Map-reduce partition columns: _col12 (type: string) Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) + value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} 
{VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} 1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((_col11 = 1) and (_col23 = _col12)) (type: boolean) - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + predicate: (_col23 = _col12) (type: boolean) + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) + expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26 - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/subquery_notin.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_notin.q.out (revision 1613528) +++ 
ql/src/test/results/clientpositive/subquery_notin.q.out (working copy) @@ -228,7 +228,7 @@ outputColumnNames: _col0, _col1, _col4 Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col4 is null) (type: boolean) + predicate: _col4 is null (type: boolean) Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) @@ -469,7 +469,7 @@ outputColumnNames: _col1, _col2, _col5, _col11 Statistics: Num rows: 17 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col11 is null) (type: boolean) + predicate: _col11 is null (type: boolean) Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) @@ -743,7 +743,7 @@ outputColumnNames: _col1, _col5, _col11 Statistics: Num rows: 36 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col11 is null) (type: boolean) + predicate: _col11 is null (type: boolean) Statistics: Num rows: 18 Data size: 1919 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col5 (type: int) @@ -1040,7 +1040,7 @@ outputColumnNames: _col1, _col2, _col5, _col11 Statistics: Num rows: 17 Data size: 3839 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col11 is null) (type: boolean) + predicate: _col11 is null (type: boolean) Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) @@ -1567,7 +1567,7 @@ outputColumnNames: _col0, _col1 Statistics: Num rows: 22 Data size: 2302 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col1 is null) (type: boolean) + predicate: _col1 is null (type: boolean) Statistics: Num rows: 11 Data size: 1151 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) Index: ql/src/test/results/clientpositive/subquery_notexists_having.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_notexists_having.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_notexists_having.q.out (working copy) @@ -93,7 +93,7 @@ outputColumnNames: _col0, _col1, _col4 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col4 is null) (type: boolean) + predicate: _col4 is null (type: boolean) Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) @@ -237,7 +237,7 @@ outputColumnNames: _col0, _col1, _col3 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col3 is null) (type: boolean) + predicate: _col3 is null (type: boolean) Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) Index: ql/src/test/results/clientpositive/num_op_type_conv.q.out =================================================================== --- ql/src/test/results/clientpositive/num_op_type_conv.q.out (revision 1613528) +++ 
ql/src/test/results/clientpositive/num_op_type_conv.q.out (working copy) @@ -20,7 +20,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (null + 7) (type: double), (1.0 - null) (type: double), (null + null) (type: double), (UDFToLong(21) % UDFToByte(5)) (type: bigint), (UDFToLong(21) % UDFToLong(21)) (type: bigint), (9 % '3') (type: double) + expressions: (null + 7) (type: double), (1.0 - null) (type: double), (null + null) (type: double), 1 (type: bigint), 0 (type: bigint), 0.0 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/udf_case.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_case.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_case.q.out (working copy) @@ -77,7 +77,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END (type: int), CASE (2) WHEN (1) THEN (2) ELSE (5) END (type: int), CASE (14) WHEN (12) THEN (13) WHEN (14) THEN (15) END (type: int), CASE (16) WHEN (12) THEN (13) WHEN (14) THEN (15) END (type: int), CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END (type: int), CASE (21) WHEN (22) THEN (23) WHEN (21) THEN (24) END (type: int) + expressions: 2 (type: int), 5 (type: int), 15 (type: int), null (type: void), CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END (type: int), 24 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/subquery_views.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_views.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_views.q.out (working copy) @@ -65,6 +65,325 @@ POSTHOOK: Output: default@cv2 Warning: Shuffle Join JOIN[42][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-5:MAPRED' is a cross product Warning: Shuffle Join JOIN[18][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain +select * +from cv2 where cv2.key in (select key from cv2 c where c.key < '11') +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * +from cv2 where cv2.key in (select key from cv2 c where c.key < '11') +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-5 depends on stages: Stage-4 + Stage-6 depends on stages: Stage-5 + Stage-3 depends on stages: Stage-2, Stage-6 + Stage-9 is a root stage + Stage-1 depends on stages: Stage-9 + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((value > 'val_11') and ((key is null or value is null) or key is null)) (type: boolean) + Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + 
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (_col0 = 0) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: _col0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Group By Operator + keys: _col0 (type: bigint) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + sort order: + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + TableScan + alias: b + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((key < '11') and key is not null) (type: boolean) + Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE + value expressions: key (type: string), value (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Left Semi Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 5 Data size: 1102 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-6 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col0 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col0 (type: string) + Statistics: Num rows: 5 Data size: 1102 Basic stats: COMPLETE Column stats: NONE + TableScan + alias: a + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (((value > 'val_11') and (key < '11')) and key is not null) (type: boolean) + Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string), key (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string) + Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + 
condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {KEY.reducesinkkey0} + 1 {KEY.reducesinkkey0} + outputColumnNames: _col0, _col4 + Statistics: Num rows: 5 Data size: 1212 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col4 is null (type: boolean) + Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Left Semi Join 0 to 1 + condition expressions: + 0 {KEY.reducesinkkey0} {VALUE._col0} + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-9 + Map Reduce + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((value > 'val_11') and ((key is null or value is null) or key is null)) (type: boolean) + Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (_col0 = 0) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: _col0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data 
size: 0 Basic stats: NONE Column stats: NONE + Group By Operator + keys: _col0 (type: bigint) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 15 Data size: 3006 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 15 Data size: 3006 Basic stats: COMPLETE Column stats: NONE + value expressions: key (type: string), value (type: string) + TableScan + Reduce Output Operator + sort order: + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Left Semi Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col0 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col0 (type: string) + Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE + TableScan + alias: a + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((value > 'val_11') and key is not null) (type: boolean) + Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string), key (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string) + Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {KEY.reducesinkkey0} {KEY.reducesinkkey1} + 1 {KEY.reducesinkkey0} + outputColumnNames: _col0, _col1, _col4 + Statistics: Num rows: 17 Data size: 3636 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col4 is null (type: boolean) + Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[42][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-5:MAPRED' is a cross product +Warning: Shuffle Join JOIN[18][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: select * from cv2 where cv2.key in (select key from cv2 c where c.key < '11') PREHOOK: type: QUERY Index: ql/src/test/results/clientpositive/udf_abs.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_abs.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_abs.q.out (working copy) @@ -42,7 +42,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: abs(0) (type: int), abs((- 1)) (type: int), abs(123) (type: int), abs((- 9223372036854775807)) (type: bigint), abs(9223372036854775807) (type: bigint) + expressions: 0 (type: int), 1 (type: int), 123 (type: int), 9223372036854775807 (type: bigint), 9223372036854775807 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink @@ -93,7 +93,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: abs(0.0) (type: double), abs((- 3.14159265)) (type: double), abs(3.14159265) (type: double) + expressions: 0.0 (type: double), 3.14159265 (type: double), 3.14159265 (type: double) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/input26.q.out =================================================================== --- ql/src/test/results/clientpositive/input26.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input26.q.out (working copy) @@ -85,7 +85,7 @@ Map Reduce Reduce Operator Tree: Select Operator - expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string) + expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), '2008-04-08' (type: string), '14' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Limit Index: ql/src/test/results/clientpositive/constprog_dp.q.out =================================================================== --- ql/src/test/results/clientpositive/constprog_dp.q.out (revision 0) +++ ql/src/test/results/clientpositive/constprog_dp.q.out (revision 0) @@ -0,0 +1,80 @@ +PREHOOK: query: create table dest(key string, value string) partitioned by (ds string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +POSTHOOK: query: create table dest(key string, value string) partitioned by (ds string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dest +PREHOOK: query: EXPLAIN +from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08' +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08' +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends 
on stages: Stage-1 + Stage-2 depends on stages: Stage-0 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: srcpart + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string), ds (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col2 (type: string) + sort order: + + Map-reduce partition columns: _col2 (type: string) + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) + Reduce Operator Tree: + Extract + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest + + Stage: Stage-0 + Move Operator + tables: + partition: + ds + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest + + Stage: Stage-2 + Stats-Aggr Operator + +PREHOOK: query: from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08' +PREHOOK: type: QUERY +PREHOOK: Input: default@srcpart +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +PREHOOK: Output: default@dest +POSTHOOK: query: from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcpart +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +POSTHOOK: Output: default@dest@ds=2008-04-08 +POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/subquery_multiinsert.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_multiinsert.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_multiinsert.q.out (working copy) @@ -183,7 +183,7 @@ outputColumnNames: _col0, _col1, _col4 Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col4 is null) (type: boolean) + predicate: _col4 is null (type: boolean) Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) @@ -270,21 +270,18 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 6393 Basic 
stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src_4 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.src_4 Stage: Stage-0 Move Operator @@ -678,21 +675,18 @@ 1 _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.src_4 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.src_4 Local Work: Map Reduce Local Work @@ -751,7 +745,7 @@ outputColumnNames: _col0, _col1, _col4 Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col4 is null) (type: boolean) + predicate: _col4 is null (type: boolean) Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) Index: ql/src/test/results/clientpositive/ppd_outer_join5.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join5.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_outer_join5.q.out (working copy) @@ -45,9 +45,8 @@ predicate: (id = 20) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: int) + key expressions: 20 (type: int) sort order: + - Map-reduce partition columns: id (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE value expressions: key (type: string), value (type: string) TableScan @@ -82,11 +81,11 @@ condition expressions: 0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - 2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12 
+ 2 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col11, _col12 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string) + expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), 20 (type: int), _col11 (type: string), _col12 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -134,9 +133,8 @@ predicate: (id = 20) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: int) + key expressions: 20 (type: int) sort order: + - Map-reduce partition columns: id (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE value expressions: key (type: string), value (type: string) TableScan @@ -146,9 +144,8 @@ predicate: (id = 20) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: int) + key expressions: 20 (type: int) sort order: + - Map-reduce partition columns: id (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE value expressions: key (type: string), value (type: string) Reduce Operator Tree: @@ -157,13 +154,13 @@ Inner Join 0 to 1 Left Outer Join1 to 2 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} 2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12 + outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string) + expressions: 20 (type: int), _col1 (type: string), _col2 (type: string), 20 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -211,9 +208,8 @@ predicate: (id = 20) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: int) + key expressions: 20 (type: int) sort order: + - Map-reduce partition columns: id (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE value expressions: key (type: string), value (type: string) TableScan @@ -223,9 +219,8 @@ predicate: (id = 20) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Reduce Output Operator - key expressions: id (type: int) + key expressions: 20 (type: int) sort order: + - Map-reduce partition columns: id (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE 
Column stats: NONE value expressions: key (type: string), value (type: string) Reduce Operator Tree: @@ -234,13 +229,13 @@ Inner Join 0 to 1 Left Outer Join0 to 2 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} 2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12 + outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string) + expressions: 20 (type: int), _col1 (type: string), _col2 (type: string), 20 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/subq_where_serialization.q.out =================================================================== --- ql/src/test/results/clientpositive/subq_where_serialization.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subq_where_serialization.q.out (working copy) @@ -93,17 +93,15 @@ 0 key (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Filter Operator - predicate: (1 = 1) (type: boolean) - Select Operator - expressions: _col0 (type: string) - outputColumnNames: _col0 - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Local Work: Map Reduce Local Work @@ -136,20 +134,17 @@ 1 outputColumnNames: _col0 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string) + outputColumnNames: _col0 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string) - outputColumnNames: _col0 + File Output Operator + compressed: false Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator Index: 
ql/src/test/results/clientpositive/udf_locate.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_locate.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_locate.q.out (working copy) @@ -64,7 +64,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: locate('abc''abcd') (type: int), locate('ccc''abcabc') (type: int), locate('23'123) (type: int), locate(23123) (type: int), locate('abc''abcabc'2) (type: int), locate('abc''abcabc''2') (type: int), locate(1true) (type: int), locate(1false) (type: int), locate(UDFToByte('2')'12345') (type: int), locate('34'UDFToShort('12345')) (type: int), locate('456'UDFToLong('123456789012')) (type: int), locate('.25'UDFToFloat(1.25)) (type: int), locate('.0'16.0) (type: int), locate(null'abc') (type: int), locate('abc'null) (type: int), locate('abc''abcd'null) (type: int), locate('abc''abcd''invalid number') (type: int) + expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 4 (type: int), 4 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), locate(null'abc') (type: int), locate('abc'null) (type: int), locate('abc''abcd'null) (type: int), 0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf7.q.out =================================================================== --- ql/src/test/results/clientpositive/udf7.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf7.q.out (working copy) @@ -46,7 +46,7 @@ alias: dest1 Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: round(ln(3.0), 12) (type: double), ln(0.0) (type: double), ln((- 1)) (type: double), round(log(3.0), 12) (type: double), log(0.0) (type: double), log((- 1)) (type: double), round(log2(3.0), 12) (type: double), log2(0.0) (type: double), log2((- 1)) (type: double), round(log10(3.0), 12) (type: double), log10(0.0) (type: double), log10((- 1)) (type: double), round(log(2, 3.0), 12) (type: double), log(2, 0.0) (type: double), log(2, (- 1)) (type: double), log(0.5, 2) (type: double), log(2, 0.5) (type: double), round(exp(2.0), 12) (type: double), power(2, 3) (type: double), power(2, 3) (type: double), power(2, (- 3)) (type: double), power(0.5, (- 3)) (type: double), power(4, 0.5) (type: double), power((- 1), 0.5) (type: double), power((- 1), 2) (type: double), power(CAST( 1 AS decimal(10,0)), 0) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double) + expressions: 1.098612288668 (type: double), null (type: void), null (type: void), 1.098612288668 (type: double), null (type: void), null (type: void), 1.584962500721 (type: double), null (type: void), null (type: void), 0.47712125472 (type: double), null (type: void), null (type: void), 1.584962500721 (type: double), null (type: void), null (type: void), null (type: void), -1.0 (type: double), 7.389056098931 (type: double), 8.0 (type: double), 8.0 (type: double), 0.125 (type: double), 8.0 (type: double), 2.0 (type: double), NaN (type: double), 1.0 (type: double), power(CAST( 1 AS decimal(10,0)), 0) (type: double), power(CAST( 2 AS 
decimal(10,0)), 3) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27 Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/nullgroup4.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/nullgroup4.q.out (working copy) @@ -15,25 +15,25 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1), count(DISTINCT value) keys: value (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -105,24 +105,24 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1), count(DISTINCT value) keys: value (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -174,19 +174,19 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: count(1), count(DISTINCT KEY._col0:0._col0) @@ -257,18 +257,18 @@ Map Operator Tree: TableScan alias: x - Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 9999) (type: boolean) - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: value - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + - Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 2853 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: count(1), count(DISTINCT KEY._col0:0._col0) Index: ql/src/test/results/clientpositive/annotate_stats_filter.q.out =================================================================== --- ql/src/test/results/clientpositive/annotate_stats_filter.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/annotate_stats_filter.q.out (working copy) @@ -136,7 +136,7 @@ predicate: (state = 'OH') (type: boolean) Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: 'OH' (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -272,7 +272,7 @@ predicate: (state = 'OH') (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: 'OH' (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -652,7 +652,7 @@ predicate: zip is null (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: state (type: 
string), locid (type: int), null (type: void), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -1144,103 +1144,22 @@ STAGE DEPENDENCIES: - Stage-1 is a root stage - Stage-0 depends on stages: Stage-1 + Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Map Operator Tree: - TableScan - alias: loc_orc - Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE - GatherStats: false - Filter Operator - isSamplingPred: false - predicate: (not false) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:int:bigint:int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.loc_orc - name: default.loc_orc - Truncated Path -> Alias: - /loc_orc [loc_orc] - Stage: Stage-0 Fetch Operator limit: -1 Processor Tree: - ListSink + TableScan + alias: loc_orc + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + GatherStats: false + Select Operator + expressions: state 
(type: string), locid (type: int), zip (type: bigint), year (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + ListSink PREHOOK: query: -- numRows: 0 rawDataSize: 0 explain extended select * from loc_orc where !true @@ -1281,7 +1200,7 @@ GatherStats: false Filter Operator isSamplingPred: false - predicate: (not true) (type: boolean) + predicate: false (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) @@ -1309,13 +1228,13 @@ GatherStats: false MultiFileSpray: false Path -> Alias: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} [loc_orc] Path -> Partition: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} Partition base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: COLUMN_STATS_ACCURATE true bucket_count -1 @@ -1330,10 +1249,10 @@ rawDataSize 796 serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe totalSize 493 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -1358,7 +1277,7 @@ name: default.loc_orc name: default.loc_orc Truncated Path -> Alias: - /loc_orc [loc_orc] + -mr-10002default.loc_orc{} [loc_orc] Stage: Stage-0 Fetch Operator @@ -1393,103 +1312,22 @@ STAGE DEPENDENCIES: - Stage-1 is a root stage - Stage-0 depends on stages: Stage-1 + Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Map Operator Tree: - TableScan - alias: loc_orc - Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE - GatherStats: false - Filter Operator - isSamplingPred: false - predicate: true (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:int:bigint:int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> 
Partition: -#### A masked pattern was here #### - Partition - base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.loc_orc - name: default.loc_orc - Truncated Path -> Alias: - /loc_orc [loc_orc] - Stage: Stage-0 Fetch Operator limit: -1 Processor Tree: - ListSink + TableScan + alias: loc_orc + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + GatherStats: false + Select Operator + expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + ListSink PREHOOK: query: -- numRows: 8 rawDataSize: 804 explain extended select * from loc_orc where 'foo' @@ -1641,103 +1479,22 @@ STAGE DEPENDENCIES: - Stage-1 is a root stage - Stage-0 depends on stages: Stage-1 + Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Map Operator Tree: - TableScan - alias: loc_orc - Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE - GatherStats: false - Filter Operator - isSamplingPred: false - predicate: (true = true) (type: boolean) - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:int:bigint:int - escape.delim \ - hive.serialization.extend.nesting.levels true - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TotalFiles: 1 - 
GatherStats: false - MultiFileSpray: false - Path -> Alias: -#### A masked pattern was here #### - Path -> Partition: -#### A masked pattern was here #### - Partition - base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat - properties: - COLUMN_STATS_ACCURATE true - bucket_count -1 - columns state,locid,zip,year - columns.comments - columns.types string:int:bigint:int - field.delim | -#### A masked pattern was here #### - name default.loc_orc - numFiles 1 - numRows 8 - rawDataSize 796 - serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} - serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 493 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.loc_orc - name: default.loc_orc - Truncated Path -> Alias: - /loc_orc [loc_orc] - Stage: Stage-0 Fetch Operator limit: -1 Processor Tree: - ListSink + TableScan + alias: loc_orc + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + GatherStats: false + Select Operator + expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE + ListSink PREHOOK: query: -- numRows: 0 rawDataSize: 0 explain extended select * from loc_orc where false = true @@ -1779,7 +1536,7 @@ GatherStats: false Filter Operator isSamplingPred: false - predicate: (false = true) (type: boolean) + predicate: false (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) @@ -1807,13 +1564,13 @@ GatherStats: false MultiFileSpray: false Path -> Alias: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} [loc_orc] Path -> Partition: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} Partition base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: COLUMN_STATS_ACCURATE true bucket_count -1 @@ -1828,10 +1585,10 @@ rawDataSize 796 serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe totalSize 493 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + serde: 
org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -1856,7 +1613,7 @@ name: default.loc_orc name: default.loc_orc Truncated Path -> Alias: - /loc_orc [loc_orc] + -mr-10002default.loc_orc{} [loc_orc] Stage: Stage-0 Fetch Operator @@ -1904,7 +1661,7 @@ GatherStats: false Filter Operator isSamplingPred: false - predicate: ('foo' = 'bar') (type: boolean) + predicate: false (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE Select Operator expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) @@ -1932,13 +1689,13 @@ GatherStats: false MultiFileSpray: false Path -> Alias: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} [loc_orc] Path -> Partition: -#### A masked pattern was here #### + -mr-10002default.loc_orc{} Partition base file name: loc_orc - input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: COLUMN_STATS_ACCURATE true bucket_count -1 @@ -1953,10 +1710,10 @@ rawDataSize 796 serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year} serialization.format | - serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe totalSize 493 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + serde: org.apache.hadoop.hive.serde2.NullStructSerDe input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat @@ -1981,7 +1738,7 @@ name: default.loc_orc name: default.loc_orc Truncated Path -> Alias: - /loc_orc [loc_orc] + -mr-10002default.loc_orc{} [loc_orc] Stage: Stage-0 Fetch Operator @@ -2296,7 +2053,7 @@ predicate: ((year = 2001) and year is null) (type: boolean) Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: state (type: string), locid (type: int), zip (type: bigint), null (type: void) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -2432,7 +2189,7 @@ predicate: (((year = 2001) and (state = 'OH')) and (state = 'FL')) (type: boolean) Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: 'FL' (type: string), locid (type: int), zip (type: bigint), 2001 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -2708,7 +2465,7 @@ predicate: (((year = 2001) or year is null) and (state = 'CA')) (type: boolean) Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int) + expressions: 'CA' (type: string), locid (type: int), zip (type: bigint), year (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 2 
Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator Index: ql/src/test/results/clientpositive/udf_instr.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_instr.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_instr.q.out (working copy) @@ -56,7 +56,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: instr('abcd''abc') (type: int), instr('abcabc''ccc') (type: int), instr(123'23') (type: int), instr(12323) (type: int), instr(true1) (type: int), instr(false1) (type: int), instr('12345'UDFToByte('2')) (type: int), instr(UDFToShort('12345')'34') (type: int), instr(UDFToLong('123456789012')'456') (type: int), instr(UDFToFloat(1.25)'.25') (type: int), instr(16.0'.0') (type: int), instr(null'abc') (type: int), instr('abcd'null) (type: int) + expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), instr(null'abc') (type: int), instr('abcd'null) (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/groupby_sort_1_23.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_1_23.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/groupby_sort_1_23.q.out (working copy) @@ -6464,11 +6464,11 @@ predicate: (key = 8) (type: boolean) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) - outputColumnNames: _col0, _col1 + expressions: val (type: string) + outputColumnNames: _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string) + expressions: '8' (type: string) outputColumnNames: _col0 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator @@ -6485,7 +6485,7 @@ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator - expressions: _col0 (type: string), _col1 (type: string) + expressions: '8' (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator Index: ql/src/test/results/clientpositive/ppd_repeated_alias.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_repeated_alias.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_repeated_alias.q.out (working copy) @@ -72,7 +72,7 @@ predicate: (_col6 = 3) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col5 (type: int), _col6 (type: int) + expressions: _col0 (type: int), _col5 (type: int), 3 (type: int) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator @@ -206,18 +206,17 @@ sort order: + Map-reduce partition columns: foo (type: int) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - 
value expressions: bar (type: int) Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: - 0 {KEY.reducesinkkey0} {VALUE._col0} + 0 {KEY.reducesinkkey0} 1 {KEY.reducesinkkey0} - outputColumnNames: _col0, _col1, _col5 + outputColumnNames: _col0, _col5 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col0 (type: int), _col5 (type: int), _col1 (type: int) + expressions: _col0 (type: int), _col5 (type: int), 3 (type: int) outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/cast1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/cast1.q.out (working copy) @@ -27,17 +27,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 86) (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: (3 + 2) (type: int), (3.0 + 2) (type: double), (3 + 2.0) (type: double), (3.0 + 2.0) (type: double), ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) (type: int), UDFToBoolean(1) (type: boolean), UDFToInteger(true) (type: int) + expressions: 5 (type: int), 5.0 (type: double), 5.0 (type: double), 5.0 (type: double), 5 (type: int), true (type: boolean), 1 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/annotate_stats_part.q.out =================================================================== --- ql/src/test/results/clientpositive/annotate_stats_part.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/annotate_stats_part.q.out (working copy) @@ -965,7 +965,7 @@ predicate: ((year = '2001') and (year = '__HIVE_DEFAULT_PARTITION__')) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string) + expressions: state (type: string), locid (type: int), zip (type: bigint), '__HIVE_DEFAULT_PARTITION__' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE ListSink Index: ql/src/test/results/clientpositive/list_bucket_dml_7.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_7.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_7.q.out (working copy) @@ -930,7 +930,7 @@ predicate: ((key = '484') and (value = 
'val_484')) (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out =================================================================== --- ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out (working copy) @@ -102,22 +102,22 @@ Map Operator Tree: TableScan alias: test_tab - Statistics: Num rows: 8 Data size: 1761 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = '484') (type: boolean) - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), part (type: string) + expressions: '484' (type: string), value (type: string), part (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -241,22 +241,22 @@ Map Operator Tree: TableScan alias: test_tab - Statistics: Num rows: 8 Data size: 1761 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = '0') (type: boolean) - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), part (type: string) + expressions: '0' (type: string), value (type: string), part (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat Index: ql/src/test/results/clientpositive/index_stale.q.out =================================================================== --- ql/src/test/results/clientpositive/index_stale.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/index_stale.q.out 
(working copy) @@ -71,7 +71,7 @@ predicate: (key = 86) (type: boolean) Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), val (type: string) + expressions: '86' (type: string), val (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/type_widening.q.out =================================================================== --- ql/src/test/results/clientpositive/type_widening.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/type_widening.q.out (working copy) @@ -16,7 +16,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: COALESCE(0,9223372036854775807) (type: bigint) + expressions: 0 (type: bigint) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/subquery_exists_having.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_exists_having.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_exists_having.q.out (working copy) @@ -104,20 +104,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -244,20 +241,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Mux Operator Statistics: Num rows: 51 Data size: 5862 Basic stats: COMPLETE Column stats: NONE Join Operator @@ -268,20 +262,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint) + outputColumnNames: _col0, _col1 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: bigint) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/subquery_exists.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_exists.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_exists.q.out (working copy) @@ -66,20 +66,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/list_bucket_dml_2.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_2.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/list_bucket_dml_2.q.out (working copy) @@ -417,7 +417,7 @@ predicate: ((key = '484') and (value = 'val_484')) (type: boolean) Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string), value (type: string), ds (type: string), hr (type: string) + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string) outputColumnNames: _col0, _col1, _col2, 
_col3 Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out =================================================================== --- ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out (working copy) @@ -119,20 +119,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -192,20 +189,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -341,20 +335,17 @@ 1 outputColumnNames: _col1, _col2, _col5 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - 
table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -488,20 +479,17 @@ 1 outputColumnNames: _col1, _col2, _col5 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - Select Operator - expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -609,20 +597,17 @@ 1 outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string) + outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 + File Output Operator + compressed: false Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -718,20 +703,17 @@ 1 outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: (1 = 1) (type: boolean) + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) - outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - 
Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-3 Map Reduce @@ -877,7 +859,7 @@ outputColumnNames: _col1, _col2, _col5, _col11 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Filter Operator - predicate: ((1 = 1) and _col11 is null) (type: boolean) + predicate: _col11 is null (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int) Index: ql/src/test/results/clientpositive/type_cast_1.q.out =================================================================== --- ql/src/test/results/clientpositive/type_cast_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/type_cast_1.q.out (working copy) @@ -16,7 +16,7 @@ alias: src Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: (if(false, 1, UDFToShort(2)) + 3) (type: int) + expressions: 5 (type: int) outputColumnNames: _col0 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Limit Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -130,7 +130,7 @@ Map-reduce partition columns: _col1 (type: string) Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE tag: -1 - value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) + value expressions: '2008-04-08' (type: string), _col1 (type: string), _col2 (type: string) auto parallelism: false Path -> Alias: #### A masked pattern was here #### Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -98,7 +98,7 @@ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator - expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int) + expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Transform Operator @@ -413,7 +413,7 @@ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator - expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int) + expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Transform Operator Index: ql/src/test/results/clientpositive/join_view.q.out 
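The hunks above drop Filter Operators whose predicate (1 = 1) folds to true, and reduce ((1 = 1) and _col11 is null) down to _col11 is null. The AND/OR short-circuiting that makes this possible is added later in this patch (shortcutFunction in ConstantPropagateProcFactory). As a plain-Java restatement of the AND rule only, ignoring SQL's three-valued NULL logic for brevity (class and method names here are illustrative, not Hive code):

public class ShortcutSketch {
  // Simplify AND when one side is a known boolean constant; return null when no rule applies.
  // The real shortcutFunction works on ExprNodeDesc children rather than plain Booleans.
  static Object shortcutAnd(Object left, Object right) {
    if (Boolean.TRUE.equals(left))  return right;  // true AND x  -> x
    if (Boolean.FALSE.equals(left)) return false;  // false AND x -> false
    if (Boolean.TRUE.equals(right)) return left;   // x AND true  -> x
    if (Boolean.FALSE.equals(right)) return false; // x AND false -> false
    return null;                                   // neither side constant: leave the AND alone
  }

  public static void main(String[] args) {
    Object residual = "_col11 is null";               // stands in for a non-constant child expression
    System.out.println(shortcutAnd(true, residual));  // (1 = 1) AND _col11 is null -> _col11 is null
    System.out.println(shortcutAnd(residual, false)); // anything AND false -> false
  }
}

Once the whole predicate simplifies to the constant true, the Filter Operator itself becomes redundant, which is why the operators disappear from the plans above rather than surviving with an empty condition.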
=================================================================== --- ql/src/test/results/clientpositive/join_view.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/join_view.q.out (working copy) @@ -50,8 +50,8 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col1} - 1 {VALUE._col0} {KEY.reducesinkkey0} - outputColumnNames: _col1, _col5, _col7 + 1 {VALUE._col0} + outputColumnNames: _col1, _col5 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: _col1 (type: string), _col5 (type: int), _col7 (type: string) Index: ql/src/test/results/clientpositive/udf_repeat.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_repeat.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_repeat.q.out (working copy) @@ -38,7 +38,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: repeat('Facebook', 3) (type: string), repeat('', 4) (type: string), repeat('asd', 0) (type: string), repeat('asdf', (- 1)) (type: string) + expressions: 'FacebookFacebookFacebook' (type: string), '' (type: string), '' (type: string), '' (type: string) outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/udf_elt.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_elt.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/udf_elt.q.out (working copy) @@ -52,7 +52,7 @@ Row Limit Per Split: 1 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE Select Operator - expressions: elt(2, 'abc', 'defg') (type: string), elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg') (type: string), elt('1', 'abc', 'defg') (type: string), elt(2, 'aa', UDFToByte('2')) (type: string), elt(2, 'aa', UDFToShort('12345')) (type: string), elt(2, 'aa', UDFToLong('123456789012')) (type: string), elt(2, 'aa', UDFToFloat(1.25)) (type: string), elt(2, 'aa', 16.0) (type: string), elt(null, 'abc', 'defg') (type: string), elt(0, 'abc', 'defg') (type: string), elt(3, 'abc', 'defg') (type: string) + expressions: 'defg' (type: string), 'cc' (type: string), 'abc' (type: string), '2' (type: string), '12345' (type: string), '123456789012' (type: string), '1.25' (type: string), '16.0' (type: string), elt(null, 'abc', 'defg') (type: string), null (type: void), null (type: void) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE ListSink Index: ql/src/test/results/clientpositive/ppd_union_view.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy) @@ -465,12 +465,12 @@ condition map: Inner Join 0 to 1 condition expressions: - 0 {VALUE._col0} {KEY.reducesinkkey1} + 0 {VALUE._col0} 1 {VALUE._col0} - outputColumnNames: _col1, _col2, _col5 + outputColumnNames: _col1, _col5 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator - expressions: _col5 (type: string), _col1 (type: string), _col2 (type: string) + expressions: _col5 (type: string), _col1 (type: string), '2011-10-15' (type: string) 
outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE File Output Operator Index: ql/src/test/results/clientpositive/constprog1.q.out =================================================================== --- ql/src/test/results/clientpositive/constprog1.q.out (revision 0) +++ ql/src/test/results/clientpositive/constprog1.q.out (revision 0) @@ -0,0 +1,37 @@ +PREHOOK: query: EXPLAIN +SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + Select Operator + expressions: 'F1' (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE + ListSink + +PREHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +F1 Index: ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out =================================================================== --- ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out (working copy) @@ -319,7 +319,7 @@ alias: orc_pred Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((t is not null and (t < 0)) and (t > (- 2))) (type: boolean) + predicate: ((t is not null and (t < 0)) and (t > -2)) (type: boolean) Statistics: Num rows: 58 Data size: 17204 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint) @@ -378,10 +378,10 @@ Map Operator Tree: TableScan alias: orc_pred - filterExpr: ((t is not null and (t < 0)) and (t > (- 2))) (type: boolean) + filterExpr: ((t is not null and (t < 0)) and (t > -2)) (type: boolean) Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: ((t is not null and (t < 0)) and (t > (- 2))) (type: boolean) + predicate: ((t is not null and (t < 0)) and (t > -2)) (type: boolean) Statistics: Num rows: 58 Data size: 17204 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint) @@ -474,10 +474,10 @@ alias: orc_pred Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((t = (- 1)) and s is not null) and (s like 'bob%')) (type: boolean) + predicate: (((t = -1) and s is not null) and (s like 'bob%')) (type: boolean) Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: t (type: tinyint), s (type: string) + expressions: -1 (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -514,13 +514,13 @@ Map Operator 
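The new constprog1.q.out above shows the optimizer collapsing IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") into the single literal 'F1'. As an illustrative trace only (Hive folds ExprNodeDesc trees at compile time rather than calling String methods; the class name is made up), the same answer falls out of evaluating the nested calls bottom-up:

public class ConstProgTrace {
  public static void main(String[] args) {
    String concat = "foo" + "bar";             // CONCAT('foo', 'bar') -> 'foobar'
    int instr = concat.indexOf("foob") + 1;    // INSTR is 1-based; 'foob' starts at position 1
    String folded = (instr > 0) ? "F1" : "B1"; // IF(1 > 0, 'F1', 'B1') -> 'F1'
    System.out.println(folded);                // prints F1, matching the folded plan and query result
  }
}

Because every argument is a literal and every function involved is deterministic, nothing is left for the tasks to compute at run time; the plan carries only the literal.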
Tree: TableScan alias: orc_pred - filterExpr: (((t = (- 1)) and s is not null) and (s like 'bob%')) (type: boolean) + filterExpr: (((t = -1) and s is not null) and (s like 'bob%')) (type: boolean) Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((t = (- 1)) and s is not null) and (s like 'bob%')) (type: boolean) + predicate: (((t = -1) and s is not null) and (s like 'bob%')) (type: boolean) Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: t (type: tinyint), s (type: string) + expressions: -1 (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -605,7 +605,7 @@ alias: orc_pred Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30) (type: boolean) + predicate: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30) (type: boolean) Statistics: Num rows: 65 Data size: 19281 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) @@ -658,10 +658,10 @@ Map Operator Tree: TableScan alias: orc_pred - filterExpr: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30) (type: boolean) + filterExpr: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30) (type: boolean) Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30) (type: boolean) + predicate: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30) (type: boolean) Statistics: Num rows: 65 Data size: 19281 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) @@ -782,7 +782,7 @@ alias: orc_pred Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + predicate: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) @@ -847,10 +847,10 @@ Map Operator Tree: TableScan alias: orc_pred - filterExpr: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + filterExpr: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + 
predicate: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) @@ -984,7 +984,7 @@ alias: orc_pred Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + predicate: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) @@ -1078,10 +1078,10 @@ Map Operator Tree: TableScan alias: orc_pred - filterExpr: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + filterExpr: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator - predicate: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) + predicate: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean) Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) Index: ql/src/test/results/clientpositive/cte_1.q.out =================================================================== --- ql/src/test/results/clientpositive/cte_1.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/cte_1.q.out (working copy) @@ -18,17 +18,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5812 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '5') (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '5' (type: string) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -75,17 +75,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5812 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '5') (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '5' (type: string) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -132,17 +132,17 @@ Map Operator Tree: TableScan alias: src - Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5812 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = '5') (type: boolean) - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: key (type: string) + expressions: '5' (type: string) outputColumnNames: _col0 - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2906 Data size: 2906 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1613528) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -133,11 +133,11 @@ Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} - 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} - outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col8 (type: string), _col9 (type: string) + expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), '2008-04-08' (type: string), '14' (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Limit Index: ql/src/test/results/compiler/plan/input6.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -797,17 +797,7 @@ _col0 - - - key - - - src1 - - - - - + @@ -816,7 +806,7 @@ - + @@ -848,6 +838,9 @@ + + key + _col0 @@ -932,6 +925,9 @@ + + key + key Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -1870,9 +1870,15 @@ + + c3 + _col2 + + b + Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -534,7 +534,7 @@ _col8 - + @@ -579,7 +579,7 @@ _col7 - + @@ -624,7 +624,7 @@ _col6 - + @@ -669,7 +669,7 @@ _col5 - + @@ -714,7 +714,7 @@ _col4 - + @@ -759,7 +759,7 @@ _col3 - + @@ -804,7 +804,7 @@ _col2 - + @@ -849,7 +849,7 @@ _col1 - + @@ -894,7 +894,7 @@ _col9 - + @@ -939,7 +939,7 @@ _col13 - + @@ -991,7 +991,7 @@ _col12 - + @@ -1036,7 +1036,7 @@ _col11 - + @@ -1081,7 +1081,7 @@ _col10 - + @@ -1126,7 +1126,7 @@ _col16 - + @@ -1178,7 +1178,7 @@ _col15 - + @@ -1230,7 +1230,7 @@ _col14 - + @@ -1282,7 +1282,7 @@ _col0 - + @@ -1332,55 +1332,174 @@ - + + + + + + true + + - + + + + + + false + + - + + + + + + true + + - + + + + + + true + + - + + + + + + true + + - + + + + + + false + + - + + + + + + false + + - + + + + + + false + + - + + + + + + true + + - + + + + + + true + + - + + + + + + false + + - + + + + + + true + + - + + + + + + true + + - + + + + + + acc + + - + + + + + + abc + + - + + + + + + abb + + - + + + + + + hive + + @@ -1788,6 +1907,9 @@ + + key + key Index: ql/src/test/results/compiler/plan/udf4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf4.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy) @@ -547,7 +547,7 @@ _col8 - + @@ -579,7 +579,7 @@ _col7 - + @@ -625,7 +625,7 @@ _col6 - + @@ -657,7 +657,7 @@ _col5 - + @@ -696,7 +696,7 @@ _col4 - + @@ -721,7 +721,7 @@ _col3 - + @@ -746,7 +746,7 @@ _col2 - + @@ -785,7 +785,7 @@ _col1 - + @@ -810,7 +810,7 @@ _col9 - + @@ -835,7 +835,7 @@ _col13 - + @@ -867,7 +867,7 @@ _col12 - + @@ -892,7 +892,7 @@ _col11 - + @@ -931,7 +931,7 @@ _col10 - + @@ -956,7 +956,7 @@ _col17 - + @@ -1009,7 +1009,7 @@ _col16 - + @@ -1048,7 +1048,7 @@ _col15 - + @@ -1084,7 +1084,7 @@ _col0 - + @@ -1109,7 +1109,7 @@ _col18 - + @@ -1149,61 +1149,173 @@ - + + + + + + 1.0 + + - + + + + + + 2.0 + + - + + + + + + -2.0 + + - + + + + + + 1 + + - + + + + + + 1 + + - + + + + + + -2 + + - + + + + + + 1.0 + + - + - + + + + + + 0.0 + + - + + + + + + 1 + + - + + + + + + 2 + + - + + + + + + -1 + + - + + + + + + 1 + + - + - + + + + + + -3 + + - + + + + + + 3 + + - + + + + + + -1 + + - + + + + + + -2 + + Index: ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -291,7 +291,7 @@ _col1 - + @@ -340,7 +340,7 @@ _col0 - + @@ -380,10 +380,24 @@ - + + + + + + ab + + - + + + + + + 1 + + Index: ql/src/test/results/compiler/plan/udf_case.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -346,7 +346,7 @@ _col1 - + @@ -411,7 +411,7 @@ _col0 - + @@ -491,10 +491,17 @@ - + + + + + + 2 + + - + Index: ql/src/test/results/compiler/plan/udf_when.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -346,7 +346,7 @@ _col1 - + @@ -453,7 +453,7 @@ _col0 - + @@ -571,10 +571,17 @@ - + + + + + + 2 + + - + Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1613528) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -378,7 +378,7 @@ _col6 - + @@ -410,7 +410,7 @@ _col5 - + @@ -442,7 +442,7 @@ _col4 - + @@ -502,7 +502,7 @@ _col3 - + @@ -541,7 +541,7 @@ _col2 - + @@ -580,7 +580,7 @@ _col1 - + @@ -619,7 +619,7 @@ _col0 - + @@ -663,25 +663,74 @@ - + + + + + + 5 + + - + + + + + + 5.0 + + - + + + + + + 5.0 + + - + + + + + + 5.0 + + - + + + + + + 5 + + - + + + + + + true + + - + + + + + + 1 + + @@ -899,6 +948,9 @@ + + key + key Index: ql/src/test/queries/clientpositive/union27.q =================================================================== --- ql/src/test/queries/clientpositive/union27.q (revision 1613528) +++ ql/src/test/queries/clientpositive/union27.q (working copy) @@ -1,4 +1,5 @@ create table jackson_sev_same as select * from src; create table dim_pho as select * from src; create table jackson_sev_add as select * from src; +explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97; select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97; Index: ql/src/test/queries/clientpositive/constprog2.q =================================================================== --- ql/src/test/queries/clientpositive/constprog2.q (revision 0) +++ ql/src/test/queries/clientpositive/constprog2.q (revision 0) @@ -0,0 +1,10 @@ +set hive.fetch.task.conversion=more; +set hive.optimize.constant.propagation=true; + +EXPLAIN +SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86; + +SELECT src1.key, src1.key + 1, src2.value + FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86; + Index: ql/src/test/queries/clientpositive/constprog_dp.q =================================================================== --- ql/src/test/queries/clientpositive/constprog_dp.q (revision 0) +++ ql/src/test/queries/clientpositive/constprog_dp.q (revision 0) @@ -0,0 +1,11 @@ +set hive.optimize.constant.propagation=true; +set hive.exec.dynamic.partition.mode=nonstrict; + +create table dest(key string, value string) partitioned by (ds string); + +EXPLAIN +from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'; + +from srcpart +insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'; Index: ql/src/test/queries/clientpositive/subquery_views.q =================================================================== --- ql/src/test/queries/clientpositive/subquery_views.q (revision 1613528) +++ 
ql/src/test/queries/clientpositive/subquery_views.q (working copy) @@ -26,10 +26,15 @@ ) ; +explain select * from cv2 where cv2.key in (select key from cv2 c where c.key < '11'); ; +select * +from cv2 where cv2.key in (select key from cv2 c where c.key < '11'); +; + -- in where + having create view cv3 as select key, value, count(*) Index: ql/src/test/queries/clientpositive/constprog1.q =================================================================== --- ql/src/test/queries/clientpositive/constprog1.q (revision 0) +++ ql/src/test/queries/clientpositive/constprog1.q (revision 0) @@ -0,0 +1,9 @@ +set hive.fetch.task.conversion=more; +set hive.optimize.constant.propagation=true; + +EXPLAIN +SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows); + +SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1") + FROM src tablesample (1 rows); Index: ql/src/test/queries/clientpositive/smb_mapjoin_18.q =================================================================== --- ql/src/test/queries/clientpositive/smb_mapjoin_18.q (revision 1613528) +++ ql/src/test/queries/clientpositive/smb_mapjoin_18.q (working copy) @@ -36,8 +36,10 @@ select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '1'; select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds = '1'; +set hive.optimize.constant.propagation=false; -- Insert data into the bucketed table by selecting from another bucketed table -- This should be a map-only operation, one of the buckets should be empty + EXPLAIN INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238; @@ -45,6 +47,7 @@ INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238; +set hive.optimize.constant.propagation=true; select count(*) from test_table2 where ds = '2'; select count(*) from test_table2 where ds = '2' and hash(key) % 2 = 0; select count(*) from test_table2 where ds = '2' and hash(key) % 2 = 1; Index: ql/src/test/queries/clientpositive/constprog_type.q =================================================================== --- ql/src/test/queries/clientpositive/constprog_type.q (revision 0) +++ ql/src/test/queries/clientpositive/constprog_type.q (revision 0) @@ -0,0 +1,14 @@ +set hive.optimize.constant.propagation=true; + +CREATE TABLE dest1(d date, t timestamp); + +EXPLAIN +INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows); + +INSERT OVERWRITE TABLE dest1 +SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) + FROM src tablesample (1 rows); + +SELECT * FROM dest1; Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java (revision 0) @@ -0,0 +1,213 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
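The new constprog*.q tests above, and the temporary set hive.optimize.constant.propagation=false; in smb_mapjoin_18.q, drive the optimizer through the session-level property. Embedded callers can presumably set the same key on a HiveConf instance; a minimal sketch, assuming a build that includes this patch is on the classpath (the class name is illustrative):

import org.apache.hadoop.hive.conf.HiveConf;

public class ToggleConstantPropagation {
  public static void main(String[] args) {
    // HiveConf extends Hadoop's Configuration, so the plain property key from the .q tests works here.
    HiveConf conf = new HiveConf();
    conf.setBoolean("hive.optimize.constant.propagation", false); // same effect as the "set ..." line in smb_mapjoin_18.q
    System.out.println(conf.getBoolean("hive.optimize.constant.propagation", true)); // prints false
  }
}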
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer; + + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.UnionOperator; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.parse.OpParseContext; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; + +/** + * This class implements the processor context for Constant Propagate. + * + * ConstantPropagateProcCtx keeps track of propagated constants in a column->const map for each + * operator, enabling constants to be revolved across operators. + */ +public class ConstantPropagateProcCtx implements NodeProcessorCtx { + + private static final org.apache.commons.logging.Log LOG = LogFactory + .getLog(ConstantPropagateProcCtx.class); + + private final Map, Map> opToConstantExprs; + private final Map, OpParseContext> opToParseCtx; + private final List> opToDelete; + + public ConstantPropagateProcCtx(Map, OpParseContext> opToParseCtx) { + opToConstantExprs = + new HashMap, Map>(); + opToDelete = new ArrayList>(); + this.opToParseCtx = opToParseCtx; + } + + public Map, Map> getOpToConstantExprs() { + return opToConstantExprs; + } + + + public Map, OpParseContext> getOpToParseCtxMap() { + return opToParseCtx; + } + + /** + * Resolve a ColumnInfo based on given RowResolver. + * + * @param ci + * @param rr + * @param parentRR + * @return + * @throws SemanticException + */ + private ColumnInfo resolve(ColumnInfo ci, RowResolver rr, RowResolver parentRR) + throws SemanticException { + // Resolve new ColumnInfo from + String alias = ci.getAlias(); + if (alias == null) { + alias = ci.getInternalName(); + } + String tblAlias = ci.getTabAlias(); + ColumnInfo rci = rr.get(tblAlias, alias); + if (rci == null && rr.getRslvMap().size() == 1 && parentRR.getRslvMap().size() == 1) { + rci = rr.get(null, alias); + } + if (rci == null) { + return null; + } + String[] tmp = rr.reverseLookup(rci.getInternalName()); + rci.setTabAlias(tmp[0]); + rci.setAlias(tmp[1]); + LOG.debug("Resolved " + + ci.getTabAlias() + "." + ci.getAlias() + " as " + + rci.getTabAlias() + "." + rci.getAlias() + " with rr: " + rr); + return rci; + } + + /** + * Get propagated constant map from parents. + * + * Traverse all parents of current operator, if there is propagated constant (determined by + * assignment expression like column=constant value), resolve the column using RowResolver and add + * it to current constant map. + * + * @param op + * operator getting the propagated constants. + * @return map of ColumnInfo to ExprNodeDesc. The values of that map must be either + * ExprNodeConstantDesc or ExprNodeNullDesc. 
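getPropagatedConstants, documented just above, gathers the constants each parent operator has already established; when the parent is a UnionOperator, the implementation that follows keeps only the constants on which every branch agrees. A dependency-free sketch of that intersection step, with plain String-to-Object maps standing in for the ColumnInfo-to-ExprNodeDesc maps the context really holds (class, names, and values are illustrative; '97' echoes the union27.q case the patch cites):

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class UnionConstantIntersection {
  // Keep only entries present with an equal value in every parent's constant map.
  static Map<String, Object> intersect(Map<String, Object> first, Map<String, Object> second) {
    Map<String, Object> result = new HashMap<String, Object>(first);
    Iterator<Map.Entry<String, Object>> it = result.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry<String, Object> e = it.next();
      Object other = second.get(e.getKey());
      if (other == null || !other.equals(e.getValue())) {
        it.remove(); // the branches disagree, so nothing definite can be propagated for this column
      }
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, Object> branch1 = new HashMap<String, Object>();
    branch1.put("key", "97");
    branch1.put("ds", "2008-04-08");
    Map<String, Object> branch2 = new HashMap<String, Object>();
    branch2.put("key", "97");
    System.out.println(intersect(branch1, branch2)); // {key=97}: only the shared constant survives
  }
}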
+ */ + public Map getPropagatedConstants( + Operator op) { + Map constants = new HashMap(); + OpParseContext parseCtx = opToParseCtx.get(op); + if (parseCtx == null) { + return constants; + } + RowResolver rr = parseCtx.getRowResolver(); + LOG.debug("Getting constants of op:" + op + " with rr:" + rr); + + try { + if (op.getParentOperators() == null) { + return constants; + } + + if (op instanceof UnionOperator) { + String alias = (String) rr.getRslvMap().keySet().toArray()[0]; + // find intersection + Map intersection = null; + for (Operator parent : op.getParentOperators()) { + Map unionConst = opToConstantExprs.get(parent); + LOG.debug("Constant of op " + parent.getOperatorId() + " " + unionConst); + if (intersection == null) { + intersection = new HashMap(); + for (Entry e : unionConst.entrySet()) { + ColumnInfo ci = new ColumnInfo(e.getKey()); + ci.setTabAlias(alias); + intersection.put(ci, e.getValue()); + } + } else { + Iterator> itr = intersection.entrySet().iterator(); + while (itr.hasNext()) { + Entry e = itr.next(); + boolean found = false; + for (Entry f : opToConstantExprs.get(parent).entrySet()) { + if (e.getKey().getInternalName().equals(f.getKey().getInternalName())) { + if (e.getValue().isSame(f.getValue())) { + found = true; + } + break; + } + } + if (!found) { + itr.remove(); + } + } + } + if (intersection.isEmpty()) { + return intersection; + } + } + LOG.debug("Propagated union constants:" + intersection); + return intersection; + } + + for (Operator parent : op.getParentOperators()) { + Map c = opToConstantExprs.get(parent); + for (Entry e : c.entrySet()) { + ColumnInfo ci = e.getKey(); + ColumnInfo rci = null; + ExprNodeDesc constant = e.getValue(); + rci = resolve(ci, rr, opToParseCtx.get(parent).getRowResolver()); + if (rci != null) { + constants.put(rci, constant); + } else { + LOG.debug("Can't resolve " + ci.getTabAlias() + "." + ci.getAlias() + " from rr:" + + rr); + } + + } + + } + LOG.debug("Offerring constants " + constants.keySet() + + " to operator " + op.toString()); + return constants; + } catch (SemanticException e) { + LOG.error(e.getMessage(), e); + throw new RuntimeException(e); + } + } + + public RowResolver getRowResolver(Operator op) { + OpParseContext parseCtx = opToParseCtx.get(op); + if (parseCtx == null) { + return null; + } + return parseCtx.getRowResolver(); + } + + public void addOpToDelete(Operator op) { + opToDelete.add(op); + } + + public List> getOpToDelete() { + return opToDelete; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy) @@ -30,6 +30,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -87,6 +88,7 @@ * Node Processor for Column Pruning on Filter Operators. */ public static class ColumnPrunerFilterProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) throws SemanticException { FilterOperator op = (FilterOperator) nd; @@ -120,6 +122,7 @@ * Node Processor for Column Pruning on Group By Operators. */ public static class ColumnPrunerGroupByProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { GroupByOperator op = (GroupByOperator) nd; @@ -154,6 +157,7 @@ } public static class ColumnPrunerScriptProc implements NodeProcessor { + @Override @SuppressWarnings("unchecked") public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { @@ -224,6 +228,7 @@ * and update the RR & signature on the PTFOp. */ public static class ColumnPrunerPTFProc extends ColumnPrunerScriptProc { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { @@ -327,6 +332,7 @@ * The Default Node Processor for Column Pruning. */ public static class ColumnPrunerDefaultProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx; @@ -351,6 +357,7 @@ * store needed columns in tableScanDesc. */ public static class ColumnPrunerTableScanProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { TableScanOperator scanOp = (TableScanOperator) nd; @@ -426,6 +433,7 @@ * The Node Processor for Column Pruning on Reduce Sink Operators. */ public static class ColumnPrunerReduceSinkProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { ReduceSinkOperator op = (ReduceSinkOperator) nd; @@ -435,6 +443,7 @@ List colLists = new ArrayList(); ArrayList keys = conf.getKeyCols(); + LOG.debug("Reduce Sink Operator " + op.getIdentifier() + " key:" + keys); for (ExprNodeDesc key : keys) { colLists = Utilities.mergeUniqElems(colLists, key.getCols()); } @@ -456,7 +465,6 @@ if (childCols != null) { boolean[] flags = new boolean[valCols.size()]; - Map exprMap = op.getColumnExprMap(); for (String childCol : childCols) { int index = valColNames.indexOf(Utilities.removeValueTag(childCol)); @@ -497,6 +505,7 @@ * The Node Processor for Column Pruning on Lateral View Join Operators. */ public static class ColumnPrunerLateralViewJoinProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { LateralViewJoinOperator op = (LateralViewJoinOperator) nd; @@ -585,6 +594,7 @@ * The Node Processor for Column Pruning on Select Operators. */ public static class ColumnPrunerSelectProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { SelectOperator op = (SelectOperator) nd; @@ -748,6 +758,12 @@ nm = oldRR.reverseLookup(outputCol); } + // In case there are multiple columns referenced to the same column name, we won't + // do row resolve once more because the ColumnInfo in row resolver is already removed + if (nm == null) { + continue; + } + // Only remove information of a column if it is not a key, // i.e. 
this column is not appearing in keyExprs of the RS if (ExprNodeDescUtils.indexOf(outputColExpr, keyExprs) == -1) { @@ -795,6 +811,7 @@ * The Node Processor for Column Pruning on Join Operators. */ public static class ColumnPrunerJoinProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { JoinOperator op = (JoinOperator) nd; @@ -817,9 +834,10 @@ * The Node Processor for Column Pruning on Map Join Operators. */ public static class ColumnPrunerMapJoinProc implements NodeProcessor { + @Override public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { - MapJoinOperator op = (MapJoinOperator) nd; + AbstractMapJoinOperator op = (AbstractMapJoinOperator) nd; pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), op .getConf().getRetainList(), true); return null; @@ -878,6 +896,7 @@ List> childOperators = op .getChildOperators(); + LOG.info("JOIN " + op.getIdentifier() + " oldExprs: " + conf.getExprs()); List childColLists = cppCtx.genColLists(op); if (childColLists == null) { return; @@ -985,6 +1004,7 @@ rs.add(col); } + LOG.info("JOIN " + op.getIdentifier() + " newExprs: " + conf.getExprs()); op.setColumnExprMap(newColExprMap); conf.setOutputColumnNames(outputCols); op.getSchema().setSignature(rs); Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (revision 0) @@ -0,0 +1,944 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package org.apache.hadoop.hive.ql.optimizer; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.Stack; + +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.FileSinkOperator; +import org.apache.hadoop.hive.ql.exec.FilterOperator; +import org.apache.hadoop.hive.ql.exec.GroupByOperator; +import org.apache.hadoop.hive.ql.exec.JoinOperator; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; +import org.apache.hadoop.hive.ql.exec.RowSchema; +import org.apache.hadoop.hive.ql.exec.SelectOperator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.parse.RowResolver; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; +import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc; +import org.apache.hadoop.hive.ql.plan.FileSinkDesc; +import org.apache.hadoop.hive.ql.plan.GroupByDesc; +import org.apache.hadoop.hive.ql.plan.JoinCondDesc; +import org.apache.hadoop.hive.ql.plan.JoinDesc; +import org.apache.hadoop.hive.ql.plan.PlanUtils; +import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; +import org.apache.hadoop.hive.ql.plan.TableScanDesc; +import org.apache.hadoop.hive.ql.udf.UDFType; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; + +import com.google.common.collect.ImmutableSet; + +/** + * Factory for generating the different node processors used by ConstantPropagate. + */ +public final class ConstantPropagateProcFactory { + protected static final Log LOG = LogFactory.getLog(ConstantPropagateProcFactory.class.getName()); + protected static Set> propagatableUdfs = new HashSet>(); + + static { + propagatableUdfs.add(GenericUDFOPAnd.class); + }; + + private ConstantPropagateProcFactory() { + // prevent instantiation + } + + /** + * Get ColumnInfo from column expression. + * + * @param rr + * @param desc + * @return + */ + public static ColumnInfo resolveColumn(RowResolver rr, + ExprNodeColumnDesc desc) { + try { + ColumnInfo ci = rr.get(desc.getTabAlias(), desc.getColumn()); + if (ci == null) { + String[] tmp = rr.reverseLookup(desc.getColumn()); + if (tmp == null) { + return null; + } + ci = rr.get(tmp[0], tmp[1]); + ci.setTabAlias(tmp[0]); + ci.setAlias(tmp[1]); + } else { + String[] tmp = rr.reverseLookup(ci.getInternalName()); + if (tmp == null) { + return null; + } + ci.setTabAlias(tmp[0]); + ci.setAlias(tmp[1]); + } + return ci; + } catch (SemanticException e) { + throw new RuntimeException(e); + } + } + + private static final Set unSupportedTypes = ImmutableSet + .builder() + .add(PrimitiveCategory.DECIMAL) + .add(PrimitiveCategory.VARCHAR) + .add(PrimitiveCategory.CHAR).build(); + + /** + * Cast type from expression type to expected type ti. + * + * @param desc constant expression + * @param ti expected type info + * @return cast constant, or null if the type cast failed. + */ + private static ExprNodeConstantDesc typeCast(ExprNodeDesc desc, TypeInfo ti) { + if (desc instanceof ExprNodeNullDesc) { + return null; + } + if (!(ti instanceof PrimitiveTypeInfo) || !(desc.getTypeInfo() instanceof PrimitiveTypeInfo)) { + return null; + } + + PrimitiveTypeInfo priti = (PrimitiveTypeInfo) ti; + PrimitiveTypeInfo descti = (PrimitiveTypeInfo) desc.getTypeInfo(); + + if (unSupportedTypes.contains(priti.getPrimitiveCategory()) + || unSupportedTypes.contains(descti.getPrimitiveCategory())) { + // FIXME: support template types. It currently has conflict with + // ExprNodeConstantDesc + return null; + } + LOG.debug("Casting " + desc + " to type " + ti); + ExprNodeConstantDesc c = (ExprNodeConstantDesc) desc; + ObjectInspector origOI = + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(desc.getTypeInfo()); + ObjectInspector oi = + TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(ti); + Converter converter = ObjectInspectorConverters.getConverter(origOI, oi); + Object convObj = converter.convert(c.getValue()); + + // Convert integer related types because converters are not sufficient + if (convObj instanceof Integer) { + switch (priti.getPrimitiveCategory()) { + case BYTE: + convObj = new Byte((byte) (((Integer) convObj).intValue())); + break; + case SHORT: + convObj = new Short((short) ((Integer) convObj).intValue()); + break; + case LONG: + convObj = new Long(((Integer) convObj).intValue()); + default: + } + } + return new ExprNodeConstantDesc(ti, convObj); + } + + /** + * Fold input expression desc. + * + * If desc is a UDF and all parameters are constants, evaluate it. If desc is a column expression, + * find it from propagated constants, and if there is, replace it with constant. 
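foldExpr, whose description ends just above, recurses bottom-up: fold the children first, then evaluate the function itself once every argument is constant and the function is deterministic, otherwise keep the partially folded tree. A self-contained sketch of that recursion with toy expression classes (not Hive's ExprNodeDesc hierarchy; all names here are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FoldSketch {
  interface Expr {}
  static class Const implements Expr {
    final int value;
    Const(int value) { this.value = value; }
    public String toString() { return Integer.toString(value); }
  }
  static class Column implements Expr {
    final String name;
    Column(String name) { this.name = name; }
    public String toString() { return name; }
  }
  static class Plus implements Expr {
    final List<Expr> children;
    Plus(Expr left, Expr right) { this.children = Arrays.asList(left, right); }
    public String toString() { return "(" + children.get(0) + " + " + children.get(1) + ")"; }
  }

  // Fold the children first; if both operands end up constant, evaluate the '+' immediately.
  static Expr fold(Expr e) {
    if (!(e instanceof Plus)) {
      return e;
    }
    Plus p = (Plus) e;
    List<Expr> folded = new ArrayList<Expr>();
    for (Expr child : p.children) {
      folded.add(fold(child));
    }
    if (folded.get(0) instanceof Const && folded.get(1) instanceof Const) {
      return new Const(((Const) folded.get(0)).value + ((Const) folded.get(1)).value);
    }
    return new Plus(folded.get(0), folded.get(1));
  }

  public static void main(String[] args) {
    System.out.println(fold(new Plus(new Const(1), new Const(2))));      // 3, like (1 + 2) in regexp_extract.q.out
    System.out.println(fold(new Plus(new Column("key"), new Const(1)))); // (key + 1) stays symbolic, as in constprog2.q
  }
}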
+ * + * @param desc folding expression + * @param constants current propagated constant map + * @param cppCtx + * @param op processing operator + * @param propagate if true, assignment expressions will be added to constants. + * @return fold expression + */ + private static ExprNodeDesc foldExpr(ExprNodeDesc desc, Map constants, + ConstantPropagateProcCtx cppCtx, Operator op, int tag, + boolean propagate) { + if (desc instanceof ExprNodeGenericFuncDesc) { + ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) desc; + + // The function must be deterministic, or we can't fold it. + GenericUDF udf = funcDesc.getGenericUDF(); + if (!isDeterministicUdf(udf)) { + LOG.debug("Function " + udf.getClass() + " undeterministic, quit folding."); + return desc; + } + + boolean propagateNext = propagate && propagatableUdfs.contains(udf.getClass()); + List newExprs = new ArrayList(); + for (ExprNodeDesc childExpr : desc.getChildren()) { + newExprs.add(foldExpr(childExpr, constants, cppCtx, op, tag, propagateNext)); + } + + // If all child expressions are constants, evaluate UDF immediately + ExprNodeDesc constant = evaluateFunction(udf, newExprs, desc.getChildren()); + if (constant != null) { + LOG.debug("Folding expression:" + desc + " -> " + constant); + return constant; + } else { + + // Check if the function can be short cut. + ExprNodeDesc shortcut = shortcutFunction(udf, newExprs); + if (shortcut != null) { + LOG.debug("Folding expression:" + desc + " -> " + shortcut); + return shortcut; + } + ((ExprNodeGenericFuncDesc) desc).setChildren(newExprs); + } + + // If in some selected binary operators (=, is null, etc), one of the + // expressions are + // constant, add them to colToConstatns as half-deterministic columns. + if (propagate) { + propagate(udf, newExprs, cppCtx.getRowResolver(op), constants); + } + + return desc; + } else if (desc instanceof ExprNodeColumnDesc) { + if (op.getParentOperators() == null || op.getParentOperators().isEmpty()) { + return desc; + } + Operator parent = op.getParentOperators().get(tag); + ExprNodeDesc col = evaluateColumn((ExprNodeColumnDesc) desc, cppCtx, parent); + if (col != null) { + LOG.debug("Folding expression:" + desc + " -> " + col); + return col; + } + } + return desc; + } + + private static boolean isDeterministicUdf(GenericUDF udf) { + UDFType udfType = udf.getClass().getAnnotation(UDFType.class); + if (udf instanceof GenericUDFBridge) { + udfType = ((GenericUDFBridge) udf).getUdfClass().getAnnotation(UDFType.class); + } + if (udfType.deterministic() == false) { + return false; + } + + // If udf is requiring additional jars, we can't determine the result in + // compile time. + String[] files; + String[] jars; + if (udf instanceof GenericUDFBridge) { + GenericUDFBridge bridge = (GenericUDFBridge) udf; + String udfClassName = bridge.getUdfClassName(); + try { + UDF udfInternal = + (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader()) + .newInstance(); + files = udfInternal.getRequiredFiles(); + jars = udf.getRequiredJars(); + } catch (Exception e) { + LOG.error("The UDF implementation class '" + udfClassName + + "' is not present in the class path"); + return false; + } + } else { + files = udf.getRequiredFiles(); + jars = udf.getRequiredJars(); + } + if (files != null || jars != null) { + return false; + } + return true; + } + + /** + * Propagate assignment expression, adding an entry into constant map constants. + * + * @param udf expression UDF, currently only 2 UDFs are supported: '=' and 'is null'. 
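As the javadoc above notes, propagate only learns a constant from two predicate shapes: column = literal and column IS NULL. A toy restatement over plain strings, leaving out the RowResolver lookup and the type cast the real method performs (class and literals are illustrative; '86' echoes constprog2.q):

import java.util.HashMap;
import java.util.Map;

public class PropagateSketch {
  // Only assignment-style predicates teach the optimizer a constant for a column.
  static void propagate(String column, String operator, String literal,
      Map<String, String> constants) {
    if ("=".equals(operator)) {
      constants.put(column, literal);
    } else if ("is null".equals(operator)) {
      constants.put(column, null); // null is itself a propagatable value
    }
    // Anything else (>, <, LIKE, ...) says nothing definite about the column's value.
  }

  public static void main(String[] args) {
    Map<String, String> constants = new HashMap<String, String>();
    propagate("key", "=", "86", constants);      // from constprog2.q: src1.key = 86
    propagate("s", "like", "'%son'", constants); // ignored: not an assignment-style predicate
    System.out.println(constants);               // {key=86}
  }
}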
+ * @param newExprs child expressions (parameters). + * @param cppCtx + * @param op + * @param constants + */ + private static void propagate(GenericUDF udf, List newExprs, RowResolver rr, + Map constants) { + if (udf instanceof GenericUDFOPEqual) { + ExprNodeDesc lOperand = newExprs.get(0); + ExprNodeDesc rOperand = newExprs.get(1); + ExprNodeColumnDesc c; + ExprNodeConstantDesc v; + if (lOperand instanceof ExprNodeColumnDesc && rOperand instanceof ExprNodeConstantDesc) { + c = (ExprNodeColumnDesc) lOperand; + v = (ExprNodeConstantDesc) rOperand; + } else if (rOperand instanceof ExprNodeColumnDesc && lOperand instanceof ExprNodeConstantDesc) { + c = (ExprNodeColumnDesc) rOperand; + v = (ExprNodeConstantDesc) lOperand; + } else { + return; + } + ColumnInfo ci = resolveColumn(rr, c); + if (ci != null) { + LOG.debug("Filter " + udf + " is identified as a value assignment, propagate it."); + if (!v.getTypeInfo().equals(ci.getType())) { + v = typeCast(v, ci.getType()); + } + if (v != null) { + constants.put(ci, v); + } + } + } else if (udf instanceof GenericUDFOPNull) { + ExprNodeDesc operand = newExprs.get(0); + if (operand instanceof ExprNodeColumnDesc) { + LOG.debug("Filter " + udf + " is identified as a value assignment, propagate it."); + ExprNodeColumnDesc c = (ExprNodeColumnDesc) operand; + ColumnInfo ci = resolveColumn(rr, c); + if (ci != null) { + constants.put(ci, new ExprNodeNullDesc()); + } + } + } + } + + private static ExprNodeDesc shortcutFunction(GenericUDF udf, List newExprs) { + if (udf instanceof GenericUDFOPAnd) { + for (int i = 0; i < 2; i++) { + ExprNodeDesc childExpr = newExprs.get(i); + if (childExpr instanceof ExprNodeConstantDesc) { + ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr; + if (c.getValue() == Boolean.TRUE) { + + // if true, prune it + return newExprs.get(Math.abs(i - 1)); + } else { + + // if false return false + return childExpr; + } + } + } + } + + if (udf instanceof GenericUDFOPOr) { + for (int i = 0; i < 2; i++) { + ExprNodeDesc childExpr = newExprs.get(i); + if (childExpr instanceof ExprNodeConstantDesc) { + ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr; + if (c.getValue() == Boolean.FALSE) { + + // if false, prune it + return newExprs.get(Math.abs(i - 1)); + } else { + + // if true return true + return childExpr; + } + } + } + } + + return null; + } + + /** + * Evaluate column, replace the deterministic columns with constants if possible + * + * @param desc + * @param ctx + * @param op + * @param colToConstants + * @return + */ + private static ExprNodeDesc evaluateColumn(ExprNodeColumnDesc desc, + ConstantPropagateProcCtx cppCtx, Operator parent) { + try { + ColumnInfo ci = null; + RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver(); + String[] tmp = rr.reverseLookup(desc.getColumn()); + if (tmp == null) { + LOG.error("Reverse look up of column " + desc + " error!"); + return null; + } + ci = rr.get(tmp[0], tmp[1]); + if (ci != null) { + ExprNodeDesc constant = null; + // Additional work for union operator, see union27.q + if (ci.getAlias() == null) { + for (Entry e : cppCtx.getOpToConstantExprs().get(parent).entrySet()) { + if (e.getKey().getInternalName().equals(ci.getInternalName())) { + constant = e.getValue(); + break; + } + } + } else { + constant = cppCtx.getOpToConstantExprs().get(parent).get(ci); + } + if (constant != null) { + if (constant instanceof ExprNodeConstantDesc + && !constant.getTypeInfo().equals(desc.getTypeInfo())) { + return typeCast(constant, desc.getTypeInfo()); + } + return 
constant; + } else { + return null; + } + } + LOG.error("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn()); + throw new RuntimeException("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn()); + } catch (SemanticException e) { + throw new RuntimeException(e); + } + + } + + /** + * Evaluate UDF + * + * @param udf UDF object + * @param exprs + * @param oldExprs + * @return null if expression cannot be evaluated (not all parameters are constants). Or evaluated + * ExprNodeConstantDesc if possible. + * @throws HiveException + */ + private static ExprNodeDesc evaluateFunction(GenericUDF udf, List exprs, + List oldExprs) { + DeferredJavaObject[] arguments = new DeferredJavaObject[exprs.size()]; + ObjectInspector[] argois = new ObjectInspector[exprs.size()]; + for (int i = 0; i < exprs.size(); i++) { + ExprNodeDesc desc = exprs.get(i); + if (desc instanceof ExprNodeConstantDesc) { + ExprNodeConstantDesc constant = (ExprNodeConstantDesc) exprs.get(i); + if (!constant.getTypeInfo().equals(oldExprs.get(i).getTypeInfo())) { + constant = typeCast(constant, oldExprs.get(i).getTypeInfo()); + if (constant == null) { + return null; + } + } + Object value = constant.getValue(); + PrimitiveTypeInfo pti = (PrimitiveTypeInfo) constant.getTypeInfo(); + Object writableValue = + PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti) + .getPrimitiveWritableObject(value); + arguments[i] = new DeferredJavaObject(writableValue); + argois[i] = + ObjectInspectorUtils.getConstantObjectInspector(constant.getWritableObjectInspector(), + writableValue); + } else if (desc instanceof ExprNodeNullDesc) { + + // FIXME: add null support. + return null; + } else { + return null; + } + } + + try { + ObjectInspector oi = udf.initialize(argois); + Object o = udf.evaluate(arguments); + LOG.debug(udf.getClass().getName() + "(" + exprs + ")=" + o); + if (o == null) { + return new ExprNodeNullDesc(); + } + Class clz = o.getClass(); + if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) { + PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi; + TypeInfo typeInfo = poi.getTypeInfo(); + + // Handling parameterized types (varchar, decimal, etc). + if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME) + || typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME) + || typeInfo.getTypeName().contains(serdeConstants.CHAR_TYPE_NAME)) { + + // Do not support parameterized types. + return null; + } + o = poi.getPrimitiveJavaObject(o); + } else if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(clz)) { + + } else { + LOG.error("Unable to evaluate " + udf + ". Return value unrecoginizable."); + return null; + } + return new ExprNodeConstantDesc(o); + } catch (HiveException e) { + LOG.error("Evaluation function " + udf.getClass() + + " failed in Constant Propagatation Optimizer."); + throw new RuntimeException(e); + } + } + + /** + * Change operator row schema, replace column with constant if it is. 
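// Reviewer note: an illustrative, standalone sketch (not part of this patch) of the boolean
// short-circuit rules that shortcutFunction() above applies when one operand of AND/OR has
// folded to a constant. Boolean.TRUE/Boolean.FALSE stand in for a folded
// ExprNodeConstantDesc; any other object stands in for a still-unfolded expression.
final class ToyShortcut {
  // Returns the simplified expression, or null when no shortcut applies.
  static Object shortcutAnd(Object left, Object right) {
    if (Boolean.FALSE.equals(left) || Boolean.FALSE.equals(right)) {
      return Boolean.FALSE;                 // FALSE AND e  ->  FALSE
    }
    if (Boolean.TRUE.equals(left)) {
      return right;                         // TRUE AND e   ->  e
    }
    if (Boolean.TRUE.equals(right)) {
      return left;                          // e AND TRUE   ->  e
    }
    return null;
  }

  static Object shortcutOr(Object left, Object right) {
    if (Boolean.TRUE.equals(left) || Boolean.TRUE.equals(right)) {
      return Boolean.TRUE;                  // TRUE OR e    ->  TRUE
    }
    if (Boolean.FALSE.equals(left)) {
      return right;                         // FALSE OR e   ->  e
    }
    if (Boolean.FALSE.equals(right)) {
      return left;                          // e OR FALSE   ->  e
    }
    return null;
  }
}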
+ * + * @param op + * @param constants + * @throws SemanticException + */ + private static void foldOperator(Operator op, + ConstantPropagateProcCtx cppCtx) throws SemanticException { + RowSchema schema = op.getSchema(); + Map constants = cppCtx.getOpToConstantExprs().get(op); + if (schema != null && schema.getSignature() != null) { + for (ColumnInfo col : schema.getSignature()) { + ExprNodeDesc constant = constants.get(col); + if (constant != null) { + LOG.debug("Replacing column " + col + " with constant " + constant + " in " + op); + if (!col.getType().equals(constant.getTypeInfo())) { + constant = typeCast(constant, col.getType()); + } + if (constant != null) { + col.setObjectinspector(constant.getWritableObjectInspector()); + } + } + } + } + + Map colExprMap = op.getColumnExprMap(); + if (colExprMap != null) { + for (Entry e : constants.entrySet()) { + String internalName = e.getKey().getInternalName(); + if (colExprMap.containsKey(internalName)) { + colExprMap.put(internalName, e.getValue()); + } + } + } + } + + /** + * Node Processor for Constant Propagation on Filter Operators. The processor is to fold + * conditional expressions and extract assignment expressions and propagate them. + */ + public static class ConstantPropagateFilterProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + FilterOperator op = (FilterOperator) nd; + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + + ExprNodeDesc condn = op.getConf().getPredicate(); + LOG.debug("Old filter FIL[" + op.getIdentifier() + "] conditions:" + condn.getExprString()); + ExprNodeDesc newCondn = foldExpr(condn, constants, cppCtx, op, 0, true); + if (newCondn instanceof ExprNodeConstantDesc) { + ExprNodeConstantDesc c = (ExprNodeConstantDesc) newCondn; + if (c.getValue() == Boolean.TRUE) { + cppCtx.addOpToDelete(op); + LOG.debug("Filter expression " + condn + " holds true. Will delete it."); + } else if (c.getValue() == Boolean.FALSE) { + LOG.warn("Filter expression " + condn + " holds false!"); + } + } + LOG.debug("New filter FIL[" + op.getIdentifier() + "] conditions:" + newCondn.getExprString()); + + // merge it with the downstream col list + op.getConf().setPredicate(newCondn); + foldOperator(op, cppCtx); + return null; + } + + } + + /** + * Factory method to get the ConstantPropagateFilterProc class. + * + * @return ConstantPropagateFilterProc + */ + public static ConstantPropagateFilterProc getFilterProc() { + return new ConstantPropagateFilterProc(); + } + + /** + * Node Processor for Constant Propagate for Group By Operators. + */ + public static class ConstantPropagateGroupByProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) + throws SemanticException { + GroupByOperator op = (GroupByOperator) nd; + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map colToConstants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, colToConstants); + + if (colToConstants.isEmpty()) { + return null; + } + + GroupByDesc conf = op.getConf(); + ArrayList keys = conf.getKeys(); + for (int i = 0; i < keys.size(); i++) { + ExprNodeDesc key = keys.get(i); + ExprNodeDesc newkey = foldExpr(key, colToConstants, cppCtx, op, 0, false); + keys.set(i, newkey); + } + foldOperator(op, cppCtx); + return null; + } + } + + /** + * Factory method to get the ConstantPropagateGroupByProc class. + * + * @return ConstantPropagateGroupByProc + */ + public static ConstantPropagateGroupByProc getGroupByProc() { + return new ConstantPropagateGroupByProc(); + } + + /** + * The Default Node Processor for Constant Propagation. + */ + public static class ConstantPropagateDefaultProc implements NodeProcessor { + @SuppressWarnings("unchecked") + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Operator op = (Operator) nd; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + if (constants.isEmpty()) { + return null; + } + foldOperator(op, cppCtx); + return null; + } + } + + /** + * Factory method to get the ConstantPropagateDefaultProc class. + * + * @return ConstantPropagateDefaultProc + */ + public static ConstantPropagateDefaultProc getDefaultProc() { + return new ConstantPropagateDefaultProc(); + } + + /** + * The Node Processor for Constant Propagation for Select Operators. + */ + public static class ConstantPropagateSelectProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + SelectOperator op = (SelectOperator) nd; + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + foldOperator(op, cppCtx); + List colList = op.getConf().getColList(); + if (colList != null) { + for (int i = 0; i < colList.size(); i++) { + ExprNodeDesc newCol = foldExpr(colList.get(i), constants, cppCtx, op, 0, false); + colList.set(i, newCol); + } + LOG.debug("New column list:(" + StringUtils.join(colList, " ") + ")"); + } + return null; + } + } + + /** + * The Factory method to get the ConstantPropagateSelectProc class. + * + * @return ConstantPropagateSelectProc + */ + public static ConstantPropagateSelectProc getSelectProc() { + return new ConstantPropagateSelectProc(); + } + + /** + * The Node Processor for constant propagation for FileSink Operators. In addition to constant + * propagation, this processor also prunes dynamic partitions to static partitions if possible. + */ + public static class ConstantPropagateFileSinkProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) + throws SemanticException { + FileSinkOperator op = (FileSinkOperator) nd; + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + if (constants.isEmpty()) { + return null; + } + FileSinkDesc fsdesc = op.getConf(); + DynamicPartitionCtx dpCtx = fsdesc.getDynPartCtx(); + if (dpCtx != null) { + + // If all dynamic partitions are propagated as constant, remove DP. + Set inputs = dpCtx.getInputToDPCols().keySet(); + + // Assume only 1 parent for FS operator + Operator parent = op.getParentOperators().get(0); + Map parentConstants = cppCtx.getPropagatedConstants(parent); + RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver(); + boolean allConstant = true; + for (String input : inputs) { + String tmp[] = rr.reverseLookup(input); + ColumnInfo ci = rr.get(tmp[0], tmp[1]); + if (parentConstants.get(ci) == null) { + allConstant = false; + break; + } + } + if (allConstant) { + pruneDP(fsdesc); + } + } + foldOperator(op, cppCtx); + return null; + } + + private void pruneDP(FileSinkDesc fsdesc) { + // FIXME: Support pruning dynamic partitioning. + LOG.info("DP can be rewritten to SP!"); + } + } + + public static NodeProcessor getFileSinkProc() { + return new ConstantPropagateFileSinkProc(); + } + + /** + * The Node Processor for Constant Propagation for Operators which is designed to stop propagate. + * Currently these kinds of Operators include UnionOperator and ScriptOperator. + */ + public static class ConstantPropagateStopProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + Operator op = (Operator) nd; + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + cppCtx.getOpToConstantExprs().put(op, new HashMap()); + LOG.debug("Stop propagate constants on op " + op.getOperatorId()); + return null; + } + } + + public static NodeProcessor getStopProc() { + return new ConstantPropagateStopProc(); + } + + /** + * The Node Processor for Constant Propagation for ReduceSink Operators. If the RS Operator is for + * a join, then only those constants from inner join tables, or from the 'inner side' of a outer + * join (left table for left outer join and vice versa) can be propagated. + */ + public static class ConstantPropagateReduceSinkProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... 
nodeOutputs) + throws SemanticException { + ReduceSinkOperator op = (ReduceSinkOperator) nd; + ReduceSinkDesc rsDesc = op.getConf(); + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + + cppCtx.getOpToConstantExprs().put(op, constants); + if (constants.isEmpty()) { + return null; + } + + if (op.getChildOperators().size() == 1 + && op.getChildOperators().get(0) instanceof JoinOperator) { + JoinOperator joinOp = (JoinOperator) op.getChildOperators().get(0); + if (skipFolding(joinOp.getConf(), rsDesc.getTag())) { + LOG.debug("Skip folding in outer join " + op); + cppCtx.getOpToConstantExprs().put(op, new HashMap()); + return null; + } + } + + if (rsDesc.getDistinctColumnIndices() != null + && !rsDesc.getDistinctColumnIndices().isEmpty()) { + LOG.debug("Skip folding in distinct subqueries " + op); + cppCtx.getOpToConstantExprs().put(op, new HashMap()); + return null; + } + + // key columns + ArrayList newKeyEpxrs = new ArrayList(); + for (ExprNodeDesc desc : rsDesc.getKeyCols()) { + newKeyEpxrs.add(foldExpr(desc, constants, cppCtx, op, 0, false)); + } + rsDesc.setKeyCols(newKeyEpxrs); + + // partition columns + ArrayList newPartExprs = new ArrayList(); + for (ExprNodeDesc desc : rsDesc.getPartitionCols()) { + ExprNodeDesc expr = foldExpr(desc, constants, cppCtx, op, 0, false); + if (expr instanceof ExprNodeConstantDesc || expr instanceof ExprNodeNullDesc) { + continue; + } + newPartExprs.add(expr); + } + rsDesc.setPartitionCols(newPartExprs); + + // value columns + ArrayList newValExprs = new ArrayList(); + for (ExprNodeDesc desc : rsDesc.getValueCols()) { + newValExprs.add(foldExpr(desc, constants, cppCtx, op, 0, false)); + } + rsDesc.setValueCols(newValExprs); + foldOperator(op, cppCtx); + return null; + } + + private boolean skipFolding(JoinDesc joinDesc, int tag) { + JoinCondDesc[] conds = joinDesc.getConds(); + int i; + for (i = conds.length - 1; i >= 0; i--) { + if (conds[i].getType() == JoinDesc.INNER_JOIN) { + if (tag == i + 1) + return false; + } else if (conds[i].getType() == JoinDesc.FULL_OUTER_JOIN) { + return true; + } else if (conds[i].getType() == JoinDesc.RIGHT_OUTER_JOIN) { + if (tag == i + 1) + return false; + return true; + } else if (conds[i].getType() == JoinDesc.LEFT_OUTER_JOIN) { + if (tag == i + 1) + return true; + } + } + if (tag == 0) { + return false; + } + return true; + } + + } + + public static NodeProcessor getReduceSinkProc() { + return new ConstantPropagateReduceSinkProc(); + } + + /** + * The Node Processor for Constant Propagation for Join Operators. + */ + public static class ConstantPropagateJoinProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + JoinOperator op = (JoinOperator) nd; + JoinDesc conf = op.getConf(); + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + if (constants.isEmpty()) { + return null; + } + + // Note: the following code (removing folded constants in exprs) is deeply coupled with + // ColumnPruner optimizer. + // Assuming ColumnPrunner will remove constant columns so we don't deal with output columns. + // Except one case that the join operator is followed by a redistribution (RS operator). 
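// Reviewer note: an illustrative, standalone restatement (not part of this patch) of the
// per-join-type rule encoded by skipFolding() above for a two-way join: constants may be
// propagated from both inputs of an inner join, only from the row-preserving ("inner") side
// of a left/right outer join, and from neither input of a full outer join. ToyJoinFoldRule
// and its JoinType enum are hypothetical stand-ins for JoinDesc/JoinCondDesc.
final class ToyJoinFoldRule {
  enum JoinType { INNER, LEFT_OUTER, RIGHT_OUTER, FULL_OUTER }

  // true means: do not propagate constants coming from this input of the join.
  static boolean skipFolding(JoinType type, boolean isLeftInput) {
    switch (type) {
      case INNER:       return false;              // both inputs are safe
      case FULL_OUTER:  return true;               // neither input is safe
      case LEFT_OUTER:  return !isLeftInput;       // only the left (preserved) input is safe
      case RIGHT_OUTER: return isLeftInput;        // only the right (preserved) input is safe
      default:          return true;
    }
  }
}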
+ if (op.getChildOperators().size() == 1 + && op.getChildOperators().get(0) instanceof ReduceSinkOperator) { + LOG.debug("Skip JOIN-RS structure."); + return null; + } + LOG.info("Old exprs " + conf.getExprs()); + Iterator>> itr = conf.getExprs().entrySet().iterator(); + while (itr.hasNext()) { + Entry> e = itr.next(); + int tag = e.getKey(); + List exprs = e.getValue(); + if (exprs == null) { + continue; + } + List newExprs = new ArrayList(); + for (ExprNodeDesc expr : exprs) { + ExprNodeDesc newExpr = foldExpr(expr, constants, cppCtx, op, tag, false); + if (newExpr instanceof ExprNodeConstantDesc || newExpr instanceof ExprNodeNullDesc) { + LOG.info("expr " + newExpr + " fold from " + expr + " is removed."); + continue; + } + newExprs.add(newExpr); + } + e.setValue(newExprs); + } + LOG.info("New exprs " + conf.getExprs()); + + for (List v : conf.getFilters().values()) { + for (int i = 0; i < v.size(); i++) { + ExprNodeDesc expr = foldExpr(v.get(i), constants, cppCtx, op, 0, false); + v.set(i, expr); + } + } + foldOperator(op, cppCtx); + return null; + } + + } + + public static NodeProcessor getJoinProc() { + return new ConstantPropagateJoinProc(); + } + + /** + * The Node Processor for Constant Propagation for Table Scan Operators. + */ + public static class ConstantPropagateTableScanProc implements NodeProcessor { + public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs) + throws SemanticException { + TableScanOperator op = (TableScanOperator) nd; + TableScanDesc conf = op.getConf(); + ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx; + Map constants = cppCtx.getPropagatedConstants(op); + cppCtx.getOpToConstantExprs().put(op, constants); + ExprNodeGenericFuncDesc pred = conf.getFilterExpr(); + if (pred == null) { + return null; + } + + List newChildren = new ArrayList(); + for (ExprNodeDesc expr : pred.getChildren()) { + ExprNodeDesc constant = foldExpr(expr, constants, cppCtx, op, 0, false); + newChildren.add(constant); + } + pred.setChildren(newChildren); + return null; + } + } + + public static NodeProcessor getTableScanProc() { + return new ConstantPropagateTableScanProc(); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (working copy) @@ -67,6 +67,9 @@ HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_MAP_GROUPBY_SORT)) { transformations.add(new GroupByOptimizer()); } + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCONSTANTPROPAGATION)) { + transformations.add(new ConstantPropagate()); + } transformations.add(new ColumnPruner()); if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) { transformations.add(new SkewJoinOptimizer()); Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java (working copy) @@ -18,13 +18,17 @@ package org.apache.hadoop.hive.ql.optimizer; +import java.util.HashMap; +import java.util.Map; import java.util.Stack; import org.apache.hadoop.hive.ql.exec.JoinOperator; +import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import 
org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.QBJoinTree; import org.apache.hadoop.hive.ql.parse.SemanticException; public class SortedMergeJoinProc extends AbstractSMBJoinProc implements NodeProcessor { @@ -42,6 +46,11 @@ JoinOperator joinOp = (JoinOperator) nd; SortBucketJoinProcCtx smbJoinContext = (SortBucketJoinProcCtx) procCtx; + Map mapJoinMap = pGraphContext.getMapJoinContext(); + if (mapJoinMap == null) { + mapJoinMap = new HashMap(); + pGraphContext.setMapJoinContext(mapJoinMap); + } boolean convert = canConvertJoinToSMBJoin( Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java (revision 0) @@ -0,0 +1,173 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.exec.FileSinkOperator; +import org.apache.hadoop.hive.ql.exec.FilterOperator; +import org.apache.hadoop.hive.ql.exec.GroupByOperator; +import org.apache.hadoop.hive.ql.exec.JoinOperator; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; +import org.apache.hadoop.hive.ql.exec.ScriptOperator; +import org.apache.hadoop.hive.ql.exec.SelectOperator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; +import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; +import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.GraphWalker; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.parse.OpParseContext; +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; + +/** + * Implementation of one of the rule-based optimization steps. ConstantPropagate traverse the DAG + * from root to child. For each conditional expression, process as follows: + * + * 1. 
Fold constant expression: if the expression is a UDF and all parameters are constant. + * + * 2. Shortcut expression: if the expression is a logical operator and it can be shortcut by + * some constants of its parameters. + * + * 3. Propagate expression: if the expression is an assignment like column=constant, the expression + * will be propagate to parents to see if further folding operation is possible. + */ +public class ConstantPropagate implements Transform { + + private static final Log LOG = LogFactory.getLog(ConstantPropagate.class); + protected ParseContext pGraphContext; + private Map, OpParseContext> opToParseCtxMap; + + public ConstantPropagate() {} + + /** + * Transform the query tree. + * + * @param pactx + * the current parse context + */ + @Override + public ParseContext transform(ParseContext pactx) throws SemanticException { + if (pactx.getConf().getBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED)) { + // Constant propagate is currently conflict with vectorizer, disabling constant propagate + // if the later is enabled. + return pactx; + } + if (pactx.getConf().getBoolVar(ConfVars.HIVEOPTSORTMERGEBUCKETMAPJOIN)) { + return pactx; + } + pGraphContext = pactx; + opToParseCtxMap = pGraphContext.getOpParseCtx(); + + // generate pruned column list for all relevant operators + ConstantPropagateProcCtx cppCtx = new ConstantPropagateProcCtx(opToParseCtxMap); + + // create a walker which walks the tree in a DFS manner while maintaining + // the operator stack. The dispatcher + // generates the plan from the operator tree + Map opRules = new LinkedHashMap(); + + opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getFilterProc()); + opRules.put(new RuleRegExp("R2", GroupByOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getGroupByProc()); + opRules.put(new RuleRegExp("R3", SelectOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getSelectProc()); + opRules.put(new RuleRegExp("R4", FileSinkOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getFileSinkProc()); + opRules.put(new RuleRegExp("R5", ReduceSinkOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getReduceSinkProc()); + opRules.put(new RuleRegExp("R6", JoinOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getJoinProc()); + opRules.put(new RuleRegExp("R7", TableScanOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getTableScanProc()); + opRules.put(new RuleRegExp("R8", ScriptOperator.getOperatorName() + "%"), + ConstantPropagateProcFactory.getStopProc()); + + // The dispatcher fires the processor corresponding to the closest matching + // rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(ConstantPropagateProcFactory + .getDefaultProc(), opRules, cppCtx); + GraphWalker ogw = new ConstantPropagateWalker(disp); + + // Create a list of operator nodes to start the walking. + ArrayList topNodes = new ArrayList(); + topNodes.addAll(pGraphContext.getTopOps().values()); + ogw.startWalking(topNodes, null); + for (Operator opToDelete : cppCtx.getOpToDelete()) { + if (opToDelete.getParentOperators() == null || opToDelete.getParentOperators().size() != 1) { + throw new RuntimeException("Error pruning operator " + opToDelete + + ". It should have only 1 parent."); + } + opToDelete.getParentOperators().get(0).removeChildAndAdoptItsChildren(opToDelete); + } + return pGraphContext; + } + + + /** + * Walks the op tree in root first order. 
+ */ + public static class ConstantPropagateWalker extends DefaultGraphWalker { + + public ConstantPropagateWalker(Dispatcher disp) { + super(disp); + } + + @Override + public void walk(Node nd) throws SemanticException { + + List parents = ((Operator) nd).getParentOperators(); + if ((parents == null) + || getDispatchedList().containsAll(parents)) { + opStack.push(nd); + + // all children are done or no need to walk the children + dispatch(nd, opStack); + opStack.pop(); + } else { + getToWalk().removeAll(parents); + getToWalk().add(0, nd); + getToWalk().addAll(0, parents); + return; + } + + // move all the children to the front of queue + List children = nd.getChildren(); + if (children != null) { + getToWalk().removeAll(children); + getToWalk().addAll(children); + } + } + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (working copy) @@ -21,11 +21,14 @@ import java.util.LinkedHashMap; import java.util.Map; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator; import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; +import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.PTFOperator; import org.apache.hadoop.hive.ql.exec.ScriptOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -77,6 +80,7 @@ */ public class PredicatePushDown implements Transform { + private static final Log LOG = LogFactory.getLog(PredicatePushDown.class); private ParseContext pGraphContext; @Override @@ -126,6 +130,7 @@ topNodes.addAll(pGraphContext.getTopOps().values()); ogw.startWalking(topNodes, null); + LOG.debug("After PPD:\n" + Operator.toString(pctx.getTopOps().values())); return pGraphContext; } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (working copy) @@ -34,6 +34,7 @@ */ public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; + final protected transient static char[] hexArray = "0123456789ABCDEF".toCharArray(); private Object value; public ExprNodeConstantDesc() { @@ -83,6 +84,15 @@ if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) { return "'" + value.toString() + "'"; + } else if (typeInfo.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) { + byte[] bytes = (byte[]) value; + char[] hexChars = new char[bytes.length * 2]; + for (int j = 0; j < bytes.length; j++) { + int v = bytes[j] & 0xFF; + hexChars[j * 2] = hexArray[v >>> 4]; + hexChars[j * 2 + 1] = hexArray[v & 0x0F]; + } + return new String(hexChars); } else { return value.toString(); } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java (revision 1613528) +++ 
ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java (working copy) @@ -104,4 +104,9 @@ public ExprNodeEvaluator[] getChildren() { return null; } + + @Override + public String toString() { + return "ExprNodeEvaluator[" + expr + "]"; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (working copy) @@ -220,4 +220,8 @@ return true; } + + public void setObjectinspector(ObjectInspector writableObjectInspector) { + this.objectInspector = writableObjectInspector; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (working copy) @@ -131,7 +131,7 @@ int bigPos = conf.getPosBigTable(); List valueOI = new ArrayList(); for (int i = 0; i < valueIndex.length; i++) { - if (valueIndex[i] >= 0) { + if (valueIndex[i] >= 0 && !joinKeysObjectInspectors[bigPos].isEmpty()) { valueOI.add(joinKeysObjectInspectors[bigPos].get(valueIndex[i])); } else { valueOI.add(inspectors.get(i)); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (working copy) @@ -32,8 +32,7 @@ /** * Select operator implementation. */ -public class SelectOperator extends Operator implements - Serializable { +public class SelectOperator extends Operator implements Serializable { private static final long serialVersionUID = 1L; protected transient ExprNodeEvaluator[] eval; @@ -60,10 +59,9 @@ } } output = new Object[eval.length]; - LOG.info("SELECT " - + ((StructObjectInspector) inputObjInspectors[0]).getTypeName()); - outputObjInspector = initEvaluatorsAndReturnStruct(eval, conf - .getOutputColumnNames(), inputObjInspectors[0]); + LOG.info("SELECT " + ((StructObjectInspector) inputObjInspectors[0]).getTypeName()); + outputObjInspector = initEvaluatorsAndReturnStruct(eval, conf.getOutputColumnNames(), + inputObjInspectors[0]); initializeChildren(hconf); } @@ -81,8 +79,7 @@ } catch (HiveException e) { throw e; } catch (RuntimeException e) { - throw new HiveException("Error evaluating " - + conf.getColList().get(i).getExprString(), e); + throw new HiveException("Error evaluating " + conf.getColList().get(i).getExprString(), e); } forward(output, outputObjInspector); } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (revision 1613528) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (working copy) @@ -272,6 +272,7 @@ // TODO: this is fishy - we init object inspectors based on first tag. We // should either init for each tag, or if rowInspector doesn't really // matter, then we can create this in ctor and get rid of firstRow. 
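// Reviewer note: an illustrative, standalone equivalent (not part of this patch) of the
// BINARY-constant rendering added to ExprNodeConstantDesc.getExprString() earlier in this
// patch; it prints a byte[] value as upper-case hex, presumably so that folded binary
// constants render readably in the expression string.
final class ToyHex {
  private static final char[] HEX = "0123456789ABCDEF".toCharArray();

  static String toHex(byte[] bytes) {
    char[] out = new char[bytes.length * 2];
    for (int j = 0; j < bytes.length; j++) {
      int v = bytes[j] & 0xFF;                // treat the byte as unsigned
      out[j * 2] = HEX[v >>> 4];              // high nibble
      out[j * 2 + 1] = HEX[v & 0x0F];         // low nibble
    }
    return new String(out);                   // e.g. {0x1F, (byte) 0xA0} -> "1FA0"
  }
}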
+    LOG.info("keys are " + conf.getOutputKeyColumnNames() + " num distributions: " + conf.getNumDistributionKeys());
     keyObjectInspector = initEvaluatorsAndReturnStruct(keyEval,
         distinctColIndices,
         conf.getOutputKeyColumnNames(), numDistributionKeys, rowInspector);
Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (revision 1613528)
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (working copy)
@@ -255,6 +255,10 @@
     int avgRowSize = 0;
     for (String neededCol : neededColumns) {
       ColumnInfo ci = getColumnInfoForColumn(neededCol, schema);
+      if (ci == null) {
+        // No need to collect statistics of index columns
+        continue;
+      }
       ObjectInspector oi = ci.getObjectInspector();
       String colType = ci.getTypeName();
       if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java (revision 1613528)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java (working copy)
@@ -54,7 +54,7 @@
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    throw new IllegalStateException("never");
+    return SessionState.get().getCurrentDatabase();
   }
 
   @Override
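// Reviewer note: the UDFCurrentDB change directly above makes evaluate() return the actual
// database name rather than throwing, presumably so that a deterministic zero-argument UDF
// can now be folded by the constant propagation optimizer like any other constant call. The
// standalone sketch below (not part of this patch, no Hive classes) illustrates that folding
// step for a hypothetical zero-argument function.
final class ToyZeroArgFold {
  interface ZeroArgUdf {
    boolean deterministic();
    Object evaluate();
  }

  // Fold to a constant only when the function is deterministic; otherwise leave it unfolded.
  static Object foldZeroArg(ZeroArgUdf udf) {
    return udf.deterministic() ? udf.evaluate() : null;
  }

  public static void main(String[] args) {
    ZeroArgUdf currentDb = new ZeroArgUdf() {
      public boolean deterministic() { return true; }
      public Object evaluate() { return "default"; }   // stands in for the SessionState lookup
    };
    System.out.println(foldZeroArg(currentDb));        // prints: default
  }
}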