Index: ql/src/test/queries/clientpositive/constprog_type.q
===================================================================
--- ql/src/test/queries/clientpositive/constprog_type.q (revision 0)
+++ ql/src/test/queries/clientpositive/constprog_type.q (revision 0)
@@ -0,0 +1,14 @@
+set hive.optimize.constant.propagation=true;
+
+CREATE TABLE dest1(d date, t timestamp);
+
+EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows);
+
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows);
+
+SELECT * FROM dest1;
Index: ql/src/test/queries/clientpositive/constprog_dp.q
===================================================================
--- ql/src/test/queries/clientpositive/constprog_dp.q (revision 0)
+++ ql/src/test/queries/clientpositive/constprog_dp.q (revision 0)
@@ -0,0 +1,11 @@
+set hive.optimize.constant.propagation=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+create table dest(key string, value string) partitioned by (ds string);
+
+EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08';
+
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08';
Index: ql/src/test/queries/clientpositive/decimal_udf.q
===================================================================
--- ql/src/test/queries/clientpositive/decimal_udf.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/decimal_udf.q (working copy)
@@ -1,4 +1,5 @@
set hive.fetch.task.conversion=more;
+set hive.optimize.constant.propagation=false;
DROP TABLE IF EXISTS DECIMAL_UDF;
Index: ql/src/test/queries/clientpositive/pcr.q
===================================================================
--- ql/src/test/queries/clientpositive/pcr.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/pcr.q (working copy)
@@ -1,3 +1,5 @@
+set hive.optimize.constant.propagation=false;
+
drop table pcr_t1;
drop table pcr_t2;
drop table pcr_t3;
Index: ql/src/test/queries/clientpositive/annotate_stats_part.q
===================================================================
--- ql/src/test/queries/clientpositive/annotate_stats_part.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/annotate_stats_part.q (working copy)
@@ -2,6 +2,7 @@
set hive.stats.autogather=false;
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.optimize.constant.propagation=false;
create table if not exists loc_staging (
state string,
Index: ql/src/test/queries/clientpositive/constprog2.q
===================================================================
--- ql/src/test/queries/clientpositive/constprog2.q (revision 0)
+++ ql/src/test/queries/clientpositive/constprog2.q (revision 0)
@@ -0,0 +1,10 @@
+set hive.fetch.task.conversion=more;
+set hive.optimize.constant.propagation=true;
+
+EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86;
+
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86;
+
Index: ql/src/test/queries/clientpositive/annotate_stats_select.q
===================================================================
--- ql/src/test/queries/clientpositive/annotate_stats_select.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/annotate_stats_select.q (working copy)
@@ -1,4 +1,5 @@
set hive.stats.fetch.column.stats=true;
+set hive.optimize.constant.propagation=false;
create table if not exists alltypes (
bo1 boolean,
Index: ql/src/test/queries/clientpositive/alter_partition_coltype.q
===================================================================
--- ql/src/test/queries/clientpositive/alter_partition_coltype.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/alter_partition_coltype.q (working copy)
@@ -1,3 +1,5 @@
+set hive.optimize.constant.propagation=false;
+
-- create testing table.
create table alter_coltype(key string, value string) partitioned by (dt string, ts string);
Index: ql/src/test/queries/clientpositive/constprog1.q
===================================================================
--- ql/src/test/queries/clientpositive/constprog1.q (revision 0)
+++ ql/src/test/queries/clientpositive/constprog1.q (revision 0)
@@ -0,0 +1,9 @@
+set hive.fetch.task.conversion=more;
+set hive.optimize.constant.propagation=true;
+
+EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows);
+
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows);
Index: ql/src/test/queries/clientpositive/annotate_stats_filter.q
===================================================================
--- ql/src/test/queries/clientpositive/annotate_stats_filter.q (revision 1558643)
+++ ql/src/test/queries/clientpositive/annotate_stats_filter.q (working copy)
@@ -1,4 +1,5 @@
set hive.stats.fetch.column.stats=true;
+set hive.optimize.constant.propagation=false;
create table if not exists loc_staging (
state string,
Index: ql/src/test/results/compiler/plan/cast1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy)
@@ -1,1173 +1,1222 @@
    [regenerated golden plan XML for cast1.q — the XML element markup was stripped from this excerpt, so the plan body is not reproduced here]
Index: ql/src/test/results/compiler/plan/join1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join1.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join1.q.xml (working copy)
@@ -1,1674 +1,1682 @@
    [regenerated golden plan XML for join1.q — the XML element markup was stripped from this excerpt, so the plan body is not reproduced here]
Index: ql/src/test/results/compiler/plan/join2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join2.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join2.q.xml (working copy)
@@ -1,2874 +1,2890 @@
-
-#### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-3
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
-
-
- HIVE_DEFAULT_LIST_BUCKETING_KEY
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-2
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
- src3
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- $INTNAME
-
-
-
-
-
-
-
- VALUE._col4
-
-
- _col0
-
-
- src1
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- src1
-
-
-
-
-
-
-
-
-
- _col4
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- double
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col4
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col4
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- RS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col4
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- _col0
-
-
- _col4
-
-
-
-
- TS_12
-
-
-
-
-
-
-
-
- _col0
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- _col4
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
- src3
-
-
-
-
-
-
-
- VALUE._col1
-
-
- value
-
-
- src3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src3
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFToDouble
-
-
- UDFToDouble
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- double
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- src3
-
-
- RS_7
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col1
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src3
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_1
-
-
-
-
-
-
-
-
- key
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src3
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
- #### A masked pattern was here ####
-
-
- $INTNAME
-
-
-
-
- #### A masked pattern was here ####
-
-
- src3
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- -mr-10001
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0,_col4
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string,string
-
-
- escape.delim
- \
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
-
- true
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
- #### A masked pattern was here ####
-
-
-
- true
-
-
-
-
-
- 150
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
-
- 1
-
-
-
-
- FS_10
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- _col9
-
-
- src3
-
-
-
-
-
-
-
- _col0
-
-
- _col4
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_9
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col4
-
-
- VALUE._col4
-
-
- src1
-
-
-
-
-
-
-
- _col9
-
-
- VALUE._col1
-
-
- src3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
-
- 1
-
-
-
-
-
- true
-
-
-
-
-
-
-
- _col4
-
-
- _col9
-
-
-
-
-
-
- _col8
- 1
-
-
- _col7
- 0
-
-
- _col11
- 1
-
-
- _col6
- 0
-
-
- _col10
- 1
-
-
- _col5
- 0
-
-
- _col4
- 0
-
-
- _col3
- 0
-
-
- _col2
- 0
-
-
- _col1
- 0
-
-
- _col0
- 0
-
-
- _col9
- 1
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- JOIN_8
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- src2
-
-
- src1
-
-
-
-
- 1
-
-
- src3
-
-
-
-
-
-
-
-
-
-
-
-
- _col4
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- _col9
-
-
- src3
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-1
-
-
-
-
-
-
- src2
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
- src1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src2
-
-
-
-
-
-
-
- VALUE._col0
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- src2
-
-
- RS_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src2
-
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
- key
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src2
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
- VALUE._col0
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- src1
-
-
- RS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
- key
-
-
-
-
- TS_2
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src1
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src2
-
-
- src1
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -1
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
-
-
-
- 1
-
-
-
-
- FS_11
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col4
-
-
- VALUE._col0
-
-
- src2
-
-
-
-
-
-
-
- _col0
-
-
- VALUE._col0
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
-
- 1
-
-
-
-
-
- true
-
-
-
-
-
-
-
- _col0
-
-
- _col4
-
-
-
-
-
-
- _col7
- 1
-
-
- _col6
- 1
-
-
- _col5
- 1
-
-
- _col4
- 1
-
-
- _col3
- 0
-
-
- _col2
- 0
-
-
- _col1
- 0
-
-
- _col0
- 0
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- JOIN_5
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- src1
-
-
-
-
- 1
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-3
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_KEY
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-2
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+ src3
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ $INTNAME
+
+
+
+
+
+
+
+ VALUE._col4
+
+
+ _col0
+
+
+ src1
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ src1
+
+
+
+
+
+
+
+
+
+ _col4
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ double
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col4
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ RS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col4
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ _col0
+
+
+ _col4
+
+
+
+
+ TS_12
+
+
+
+
+
+
+
+
+ _col0
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col4
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+ src3
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ value
+
+
+ src3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src3
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFToDouble
+
+
+ UDFToDouble
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ double
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ src3
+
+
+ RS_7
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ src3
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src3
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_1
+
+
+
+
+
+
+
+
+ key
+
+
+ src3
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src3
+
+
+
+
+
+ string
+
+
+
+ [regenerated plan XML, continued: reduce-side join descriptors over src1/src2/src3 reading default.src, the intermediate -mr-10001 ($INTNAME) in SequenceFile/LazyBinarySerDe format, operators TS_0/TS_2, RS_3/RS_4, JOIN_5/JOIN_8, SEL_9, FS_10/FS_11, with masked locations and timestamps; the XML element tags of this golden file did not survive extraction]
Index: ql/src/test/results/compiler/plan/join3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join3.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join3.q.xml (working copy)
@@ -1,2139 +1,2151 @@
- [previous golden plan XML for join3.q: Stage-2/Stage-0/Stage-1 plan inserting into default.dest1 from a three-way join of src1, src2 and src3 on key; table descriptors for default.src, operators TS_0 to TS_2, RS_3 to RS_5, JOIN_6, SEL_7, FS_8, with masked locations and timestamps; only the stripped text values of this golden file survived extraction]
+ [regenerated golden plan XML for join3.q: the same Stage-2/Stage-0/Stage-1 plan re-serialized, inserting into default.dest1 from the three-way join of src1, src2 and src3 on key; operators TS_0 to TS_2, RS_3 to RS_5, JOIN_6, SEL_7, FS_8; only the stripped text values of this golden file survived extraction]
Index: ql/src/test/results/compiler/plan/join4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join4.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join4.q.xml (working copy)
@@ -1,2213 +1,2217 @@
- [previous golden plan XML for join4.q: Stage-1 plan joining subqueries c:a:src1 (key filter 10 to 20, FIL_12) and c:b:src2 (key filter 15 to 25, FIL_13), selecting c1 to c4 as _col0 to _col3; operators TS_0/TS_3, SEL_2/SEL_5, RS_6/RS_7, JOIN_8, SEL_9, FS_11, with masked locations and timestamps; only the stripped text values of this golden file survived extraction]
+ [regenerated golden plan XML for join4.q: the same Stage-1 plan re-serialized; subqueries c:a:src1 and c:b:src2 with key filters 10 to 20 and 15 to 25, operators TS_0/TS_3, SEL_2/SEL_5, FIL_12/FIL_13, RS_6/RS_7, JOIN_8, SEL_9, FS_11; only the stripped text values of this golden file survived extraction]
Index: ql/src/test/results/compiler/plan/join5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join5.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join5.q.xml (working copy)
@@ -1,2209 +1,2213 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- c:a:src1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
- c:b:src2
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- c:a:src1
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col1
-
-
- _col1
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string,string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- a
-
-
- RS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col1
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- value
-
-
- src1
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 10
-
-
-
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
- 20
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_12
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src1
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
- c:b:src2
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col1
-
-
- _col1
-
-
-
-
-
-
-
- VALUE._col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string,string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- b
-
-
- RS_7
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col1
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- value
-
-
- src2
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
- 15
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
- 25
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_13
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src2
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src2
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- c:a:src1
-
-
- c:b:src2
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1,_col2,_col3
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string:string:string:string
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_11
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col3
-
-
- _col3
-
-
- b
-
-
-
-
-
-
-
- _col2
-
-
- _col2
-
-
- b
-
-
-
-
-
-
-
- _col1
-
-
- _col1
-
-
- a
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
- a
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
-
-
-
-
- SEL_9
-
-
-
-
-
-
-
-
-
-
-
-
-
- c1
-
-
- _col0
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c2
-
-
- _col1
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c3
-
-
- _col2
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c4
-
-
- _col3
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col3
-
-
- VALUE._col1
-
-
- b
-
-
-
-
-
-
-
- _col2
-
-
- VALUE._col0
-
-
- b
-
-
-
-
-
-
-
- _col1
-
-
- VALUE._col1
-
-
- a
-
-
-
-
-
-
-
- _col0
-
-
- VALUE._col0
-
-
- a
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
- 2
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
-
-
-
-
- _col3
- 1
-
-
- _col2
- 1
-
-
- _col1
- 0
-
-
- _col0
- 0
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- JOIN_8
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- a
-
-
-
-
- 1
-
-
- b
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- _col2
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
- _col3
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ c:a:src1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+ c:b:src2
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ c:a:src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ _col1
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string,string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ a
+
+
+ RS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col1
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ value
+
+
+ src1
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 10
+
+
+
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_12
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src1
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src1
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+ c:b:src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ _col1
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string,string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ b
+
+
+ RS_7
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col1
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ value
+
+
+ src2
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+ 15
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+ 25
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_13
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src2
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src2
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ c:a:src1
+
+
+ c:b:src2
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1,_col2,_col3
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string:string:string:string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_11
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+ _col3
+
+
+ b
+
+
+
+
+
+
+
+ _col2
+
+
+ _col2
+
+
+ b
+
+
+
+
+
+
+
+ _col1
+
+
+ _col1
+
+
+ a
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+ a
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+
+
+
+
+ SEL_9
+
+
+
+
+
+
+
+
+
+
+
+
+
+ c1
+
+
+ _col0
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c2
+
+
+ _col1
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c3
+
+
+ _col2
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c4
+
+
+ _col3
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+ VALUE._col1
+
+
+ b
+
+
+
+
+
+
+
+ _col2
+
+
+ VALUE._col0
+
+
+ b
+
+
+
+
+
+
+
+ _col1
+
+
+ VALUE._col1
+
+
+ a
+
+
+
+
+
+
+
+ _col0
+
+
+ VALUE._col0
+
+
+ a
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+ 2
+
+
+
+
+
+
+
+
+ 0
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+
+
+
+
+ _col3
+ 1
+
+
+ _col2
+ 1
+
+
+ _col1
+ 0
+
+
+ _col0
+ 0
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ JOIN_8
+
+
+
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ a
+
+
+
+
+ 1
+
+
+ b
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col2
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col3
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/input6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input6.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/input6.q.xml (working copy)
@@ -1,1164 +1,1166 @@
-
-#### A masked pattern was here ####
-
-
-
-
-
-
- Stage-7
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-2
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-3
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- 1
-
-
-
-
- FS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
-
-
-
-
-
- string
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_5
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
-
-
- true
-
-
-
- #### A masked pattern was here ####
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- -ext-10001
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-6
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-5
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
-
-
- HIVE_DEFAULT_LIST_BUCKETING_KEY
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-4
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src1
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 216
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
- #### A masked pattern was here ####
-
-
-
- true
-
-
-
-
-
- 150
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
-
- 1
-
-
-
-
- FS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- value
-
-
- src1
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
-
-
- FIL_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src1
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
- #### A masked pattern was here ####
-
-
- src1
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src1
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 216
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src1
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 216
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+
+
+
+
+ Stage-7
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-2
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-3
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+
+
+
+
+
+ string
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_5
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
+
+
+ true
+
+
+
+ #### A masked pattern was here ####
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ -ext-10001
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_KEY
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src1
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 216
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ true
+
+
+
+
+
+ 150
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+
+ 1
+
+
+
+
+ FS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ value
+
+
+ src1
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+
+
+ FIL_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src1
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src1
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+ #### A masked pattern was here ####
+
+
+ src1
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src1
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 216
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src1
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 216
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/join8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join8.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/join8.q.xml (working copy)
@@ -1,2350 +1,2358 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- c:a:src1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
- c:b:src2
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- c:a:src1
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col1
-
-
- _col1
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string,string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- a
-
-
- RS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col1
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- value
-
-
- src1
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 10
-
-
-
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
- 20
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_14
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src1
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src1
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
- c:b:src2
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col1
-
-
- _col1
-
-
-
-
-
-
-
- VALUE._col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- joinkey0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- reducesinkkey0
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- string,string
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- b
-
-
- RS_7
-
-
-
-
-
-
-
-
-
-
-
-
-
- VALUE._col0
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col1
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- value
-
-
- src2
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
- 15
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
- 25
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_15
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src2
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src2
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src2
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- c:a:src1
-
-
- c:b:src2
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1,_col2,_col3
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string:string:string:string
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_12
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col3
-
-
- _col3
-
-
- b
-
-
-
-
-
-
-
- _col2
-
-
- _col2
-
-
- b
-
-
-
-
-
-
-
- _col1
-
-
- _col1
-
-
- a
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
- a
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
-
-
-
-
- SEL_9
-
-
-
-
-
-
-
-
-
-
-
-
-
- c1
-
-
- _col0
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c2
-
-
- _col1
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c3
-
-
- _col2
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
- c4
-
-
- _col3
-
-
- c
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_13
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
- a
-
-
-
-
-
- string
-
-
-
-
-
-
- _col2
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
- _col3
-
-
- b
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col3
-
-
- VALUE._col1
-
-
- b
-
-
-
-
-
-
-
- _col2
-
-
- VALUE._col0
-
-
- b
-
-
-
-
-
-
-
- _col1
-
-
- VALUE._col1
-
-
- a
-
-
-
-
-
-
-
- _col0
-
-
- VALUE._col0
-
-
- a
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- 0
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
-
-
-
-
- _col3
- 1
-
-
- _col2
- 1
-
-
- _col1
- 0
-
-
- _col0
- 0
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- JOIN_8
-
-
-
-
-
-
-
-
-
-
-
-
- 0
-
-
- a
-
-
-
-
- 1
-
-
- b
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ c:a:src1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+ c:b:src2
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ c:a:src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ _col1
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string,string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ a
+
+
+ RS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col1
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ value
+
+
+ src1
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 10
+
+
+
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_14
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src1
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src1
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+ c:b:src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col1
+
+
+ _col1
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ joinkey0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ reducesinkkey0
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ string,string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ b
+
+
+ RS_7
+
+
+
+
+
+
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col1
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ value
+
+
+ src2
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+ 15
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+ 25
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_15
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src2
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src2
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src2
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ c:a:src1
+
+
+ c:b:src2
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1,_col2,_col3
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string:string:string:string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_12
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+ _col3
+
+
+ b
+
+
+
+
+
+
+
+ _col2
+
+
+ _col2
+
+
+ b
+
+
+
+
+
+
+
+ _col1
+
+
+ _col1
+
+
+ a
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+ a
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+
+
+
+
+ SEL_9
+
+
+
+
+
+
+
+
+
+
+
+
+
+ c1
+
+
+ _col0
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c2
+
+
+ _col1
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c3
+
+
+ _col2
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+ c4
+
+
+ _col3
+
+
+ c
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_13
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+ a
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col2
+
+
+ b
+
+
+
+
+ void
+
+
+
+
+ string
+
+
+
+
+
+
+ _col3
+
+
+ b
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+ VALUE._col1
+
+
+ b
+
+
+
+
+
+
+
+ _col2
+
+
+ VALUE._col0
+
+
+ b
+
+
+
+
+
+
+
+ _col1
+
+
+ VALUE._col1
+
+
+ a
+
+
+
+
+
+
+
+ _col0
+
+
+ VALUE._col0
+
+
+ a
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+ 1
+
+
+
+
+
+
+
+
+ 0
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ 0
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+
+
+
+
+ _col3
+ 1
+
+
+ _col2
+ 1
+
+
+ _col1
+ 0
+
+
+ _col0
+ 0
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ JOIN_8
+
+
+
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ a
+
+
+
+
+ 1
+
+
+ b
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/udf1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy)
@@ -1,2082 +1,2201 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- boolean
-
-
-
-
- boolean
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col4
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col5
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col6
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col7
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col8
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col9
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col10
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col11
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col12
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _col13
-
-
-
-
-
-
-
- string
-
-
-
-
- string
-
-
-
-
-
-
- _col14
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col15
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col16
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col8
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- .*
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExp
-
-
- rlike
-
-
-
-
-
-
-
-
-
- _col7
-
-
-
-
-
-
-
-
-
- ab
-
-
-
-
-
-
-
-
-
- a
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col6
-
-
-
-
-
-
-
-
-
- ab
-
-
-
-
-
-
-
-
-
- _a%
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col5
-
-
-
-
-
-
-
-
-
- ab
-
-
-
-
-
-
-
-
-
- \%\_
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col4
-
-
-
-
-
-
-
-
-
- %_
-
-
-
-
-
-
-
-
-
- \%\_
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
-
-
- ab
-
-
-
-
-
-
-
-
-
- %a_
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
-
- ab
-
-
-
-
-
-
-
-
-
- %a%
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
-
- b
-
-
-
-
-
-
-
-
-
- %a%
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
- _col9
-
-
-
-
-
-
-
-
-
- a
-
-
-
-
-
-
-
-
-
- [ab]
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExp
-
-
- rlike
-
-
-
-
-
-
-
-
-
- _col13
-
-
-
-
-
-
-
-
-
- abc
-
-
-
-
-
-
-
-
-
- b
-
-
-
-
-
-
-
-
-
- c
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
-
-
- regexp_replace
-
-
-
-
-
-
-
-
-
- _col12
-
-
-
-
-
-
-
-
-
- hadoop
-
-
-
-
-
-
-
-
-
- o*
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExp
-
-
- rlike
-
-
-
-
-
-
-
-
-
- _col11
-
-
-
-
-
-
-
-
-
- hadoop
-
-
-
-
-
-
-
-
-
- [a-z]*
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExp
-
-
- rlike
-
-
-
-
-
-
-
-
-
- _col10
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- [ab]
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExp
-
-
- rlike
-
-
-
-
-
-
-
-
-
- _col16
-
-
-
-
-
-
-
-
-
- hadoop
-
-
-
-
-
-
-
-
-
- (.)[a-z]*
-
-
-
-
-
-
-
-
-
- $1ive
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
-
-
- regexp_replace
-
-
-
-
-
-
-
-
-
- _col15
-
-
-
-
-
-
-
-
-
- abbbb
-
-
-
-
-
-
-
-
-
- bb
-
-
-
-
-
-
-
-
-
- b
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
-
-
- regexp_replace
-
-
-
-
-
-
-
-
-
- _col14
-
-
-
-
-
-
-
-
-
- abc
-
-
-
-
-
-
-
-
-
- z
-
-
-
-
-
-
-
-
-
- a
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
-
-
- regexp_replace
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
- a
-
-
-
-
-
-
-
-
-
- %a%
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFLike
-
-
- like
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
- _col4
-
-
- _col5
-
-
- _col6
-
-
- _col7
-
-
- _col8
-
-
- _col9
-
-
- _col10
-
-
- _col11
-
-
- _col12
-
-
- _col13
-
-
- _col14
-
-
- _col15
-
-
- _col16
-
-
-
-
-
-
- SEL_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c1
-
-
- _col1
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c2
-
-
- _col2
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c3
-
-
- _col3
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c4
-
-
- _col4
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c5
-
-
- _col5
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c6
-
-
- _col6
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c7
-
-
- _col7
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c8
-
-
- _col8
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c9
-
-
- _col9
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c10
-
-
- _col10
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c11
-
-
- _col11
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c12
-
-
- _col12
-
-
-
-
-
- boolean
-
-
-
-
-
-
- _c13
-
-
- _col13
-
-
-
-
-
- string
-
-
-
-
-
-
- _c14
-
-
- _col14
-
-
-
-
-
- string
-
-
-
-
-
-
- _c15
-
-
- _col15
-
-
-
-
-
- string
-
-
-
-
-
-
- _c16
-
-
- _col16
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 86
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- FIL_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
- key
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col4
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col5
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col6
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col7
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col8
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col9
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col10
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col11
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col12
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _col13
+
+
+
+
+
+
+
+ string
+
+
+
+
+ string
+
+
+
+
+
+
+ _col14
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col15
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col16
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExp
+
+
+ rlike
+
+
+
+
+
+
+
+
+
+ _col7
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ a
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col6
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ _a%
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col5
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ \%\_
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col4
+
+
+
+
+
+
+
+
+
+ %_
+
+
+
+
+
+
+
+
+
+ \%\_
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ %a_
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ %a%
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+ b
+
+
+
+
+
+
+
+
+
+ %a%
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+ _col9
+
+
+
+
+
+
+
+
+
+ a
+
+
+
+
+
+
+
+
+
+ [ab]
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExp
+
+
+ rlike
+
+
+
+
+
+
+
+
+
+ _col13
+
+
+
+
+
+
+
+
+
+ abc
+
+
+
+
+
+
+
+
+
+ b
+
+
+
+
+
+
+
+
+
+ c
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
+
+
+ regexp_replace
+
+
+
+
+
+
+
+
+
+ _col12
+
+
+
+
+
+
+
+
+
+ hadoop
+
+
+
+
+
+
+
+
+
+ o*
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExp
+
+
+ rlike
+
+
+
+
+
+
+
+
+
+ _col11
+
+
+
+
+
+
+
+
+
+ hadoop
+
+
+
+
+
+
+
+
+
+ [a-z]*
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExp
+
+
+ rlike
+
+
+
+
+
+
+
+
+
+ _col10
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ [ab]
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExp
+
+
+ rlike
+
+
+
+
+
+
+
+
+
+ _col16
+
+
+
+
+
+
+
+
+
+ hadoop
+
+
+
+
+
+
+
+
+
+ (.)[a-z]*
+
+
+
+
+
+
+
+
+
+ $1ive
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
+
+
+ regexp_replace
+
+
+
+
+
+
+
+
+
+ _col15
+
+
+
+
+
+
+
+
+
+ abbbb
+
+
+
+
+
+
+
+
+
+ bb
+
+
+
+
+
+
+
+
+
+ b
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
+
+
+ regexp_replace
+
+
+
+
+
+
+
+
+
+ _col14
+
+
+
+
+
+
+
+
+
+ abc
+
+
+
+
+
+
+
+
+
+ z
+
+
+
+
+
+
+
+
+
+ a
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRegExpReplace
+
+
+ regexp_replace
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+ a
+
+
+
+
+
+
+
+
+
+ %a%
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFLike
+
+
+ like
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ acc
+
+
+
+
+
+
+
+
+
+ abc
+
+
+
+
+
+
+
+
+
+ abb
+
+
+
+
+
+
+
+
+
+ hive
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+ _col4
+
+
+ _col5
+
+
+ _col6
+
+
+ _col7
+
+
+ _col8
+
+
+ _col9
+
+
+ _col10
+
+
+ _col11
+
+
+ _col12
+
+
+ _col13
+
+
+ _col14
+
+
+ _col15
+
+
+ _col16
+
+
+
+
+
+
+ SEL_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c1
+
+
+ _col1
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c2
+
+
+ _col2
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c3
+
+
+ _col3
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c4
+
+
+ _col4
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c5
+
+
+ _col5
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c6
+
+
+ _col6
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c7
+
+
+ _col7
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c8
+
+
+ _col8
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c9
+
+
+ _col9
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c10
+
+
+ _col10
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c11
+
+
+ _col11
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c12
+
+
+ _col12
+
+
+
+
+
+ boolean
+
+
+
+
+
+
+ _c13
+
+
+ _col13
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _c14
+
+
+ _col14
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _c15
+
+
+ _col15
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _c16
+
+
+ _col16
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 86
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ FIL_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+ 0
+
+
+
+
+
+
+ key
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/udf4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf4.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy)
@@ -1,1820 +1,1932 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- dest1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- dest1
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- double:double:double:bigint:bigint:bigint:double:double:double:bigint:bigint:bigint:bigint:double:int:int:int:int:int
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- double
-
-
-
-
- double
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- _col4
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col5
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col6
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col7
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col8
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col9
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col10
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col11
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col12
-
-
-
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _col13
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
- _col14
-
-
-
-
-
-
-
- int
-
-
-
-
- int
-
-
-
-
-
-
- _col15
-
-
-
-
-
-
-
-
- int
-
-
-
-
-
-
- _col16
-
-
-
-
-
-
-
-
- int
-
-
-
-
-
-
- _col17
-
-
-
-
-
-
-
-
- int
-
-
-
-
-
-
- _col18
-
-
-
-
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col8
-
-
-
-
-
-
-
-
-
- 0.0
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSqrt
-
-
- sqrt
-
-
-
-
-
-
-
-
-
- _col7
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSqrt
-
-
- sqrt
-
-
-
-
-
-
-
-
-
- _col6
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSqrt
-
-
- sqrt
-
-
-
-
-
-
-
-
-
- _col5
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col4
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col3
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col2
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col9
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col13
-
-
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFRand
-
-
- rand
-
-
-
-
-
-
-
-
-
- _col12
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col11
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col10
-
-
-
-
-
-
-
-
-
- 1.5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col17
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col16
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col15
-
-
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col14
-
-
-
-
-
- 3
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
- 1.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col18
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- true
-
-
- org.apache.hadoop.hive.ql.udf.UDFOPBitNot
-
-
- ~
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
- _col2
-
-
- _col3
-
-
- _col4
-
-
- _col5
-
-
- _col6
-
-
- _col7
-
-
- _col8
-
-
- _col9
-
-
- _col10
-
-
- _col11
-
-
- _col12
-
-
- _col13
-
-
- _col14
-
-
- _col15
-
-
- _col16
-
-
- _col17
-
-
- _col18
-
-
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- double
-
-
-
-
-
-
- _c1
-
-
- _col1
-
-
-
-
-
- double
-
-
-
-
-
-
- _c2
-
-
- _col2
-
-
-
-
-
- double
-
-
-
-
-
-
- _c3
-
-
- _col3
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c4
-
-
- _col4
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c5
-
-
- _col5
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c6
-
-
- _col6
-
-
-
-
-
- double
-
-
-
-
-
-
- _c7
-
-
- _col7
-
-
-
-
-
- double
-
-
-
-
-
-
- _c8
-
-
- _col8
-
-
-
-
-
- double
-
-
-
-
-
-
- _c9
-
-
- _col9
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c10
-
-
- _col10
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c11
-
-
- _col11
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c12
-
-
- _col12
-
-
-
-
-
- bigint
-
-
-
-
-
-
- _c13
-
-
- _col13
-
-
-
-
-
- double
-
-
-
-
-
-
- _c14
-
-
- _col14
-
-
-
-
-
- int
-
-
-
-
-
-
- _c15
-
-
- _col15
-
-
-
-
-
- int
-
-
-
-
-
-
- _c16
-
-
- _col16
-
-
-
-
-
- int
-
-
-
-
-
-
- _c17
-
-
- _col17
-
-
-
-
-
- int
-
-
-
-
-
-
- _c18
-
-
- _col18
-
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- dest1
-
-
-
-
-
-
-
-
-
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
- key
-
-
- dest1
-
-
-
-
- string
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- dest1
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- dest1
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- dest1
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- dest1
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- dest1
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ dest1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ dest1
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ double:double:double:bigint:bigint:bigint:double:double:double:bigint:bigint:bigint:bigint:double:int:int:int:int:int
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+ double
+
+
+
+
+ double
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col4
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col5
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col6
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col7
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col8
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col9
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col10
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col11
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col12
+
+
+
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _col13
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _col14
+
+
+
+
+
+
+
+ int
+
+
+
+
+ int
+
+
+
+
+
+
+ _col15
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _col16
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _col17
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _col18
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col8
+
+
+
+
+
+
+
+
+
+ 0.0
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSqrt
+
+
+ sqrt
+
+
+
+
+
+
+
+
+
+ _col7
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSqrt
+
+
+ sqrt
+
+
+
+
+
+
+
+
+
+ _col6
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSqrt
+
+
+ sqrt
+
+
+
+
+
+
+
+
+
+ _col5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col4
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col3
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col9
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col13
+
+
+
+
+
+
+
+
+
+ 3
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFRand
+
+
+ rand
+
+
+
+
+
+
+
+
+
+ _col12
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col11
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col10
+
+
+
+
+
+
+
+
+
+ 1.5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col17
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col16
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col15
+
+
+
+
+
+
+
+
+
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col14
+
+
+
+
+
+ 3
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col18
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ true
+
+
+ org.apache.hadoop.hive.ql.udf.UDFOPBitNot
+
+
+ ~
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+ 2.0
+
+
+
+
+
+
+
+
+
+ -2.0
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ -2
+
+
+
+
+
+
+
+
+
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.0
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -3
+
+
+
+
+
+
+
+
+
+ 3
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+ -2
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+ _col2
+
+
+ _col3
+
+
+ _col4
+
+
+ _col5
+
+
+ _col6
+
+
+ _col7
+
+
+ _col8
+
+
+ _col9
+
+
+ _col10
+
+
+ _col11
+
+
+ _col12
+
+
+ _col13
+
+
+ _col14
+
+
+ _col15
+
+
+ _col16
+
+
+ _col17
+
+
+ _col18
+
+
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c1
+
+
+ _col1
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c2
+
+
+ _col2
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c3
+
+
+ _col3
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c4
+
+
+ _col4
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c5
+
+
+ _col5
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c6
+
+
+ _col6
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c7
+
+
+ _col7
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c8
+
+
+ _col8
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c9
+
+
+ _col9
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c10
+
+
+ _col10
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c11
+
+
+ _col11
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c12
+
+
+ _col12
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ _c13
+
+
+ _col13
+
+
+
+
+
+ double
+
+
+
+
+
+
+ _c14
+
+
+ _col14
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _c15
+
+
+ _col15
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _c16
+
+
+ _col16
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _c17
+
+
+ _col17
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _c18
+
+
+ _col18
+
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ dest1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+ key
+
+
+ dest1
+
+
+
+
+ string
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ dest1
+
+
+
+
+
+ string
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ dest1
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ dest1
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ dest1
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ dest1
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/udf6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy)
@@ -1,725 +1,739 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string:int
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- string
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
- int
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- boolean
-
-
-
-
- true
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
- a
-
-
-
-
-
-
-
-
-
- b
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
- _c1
-
-
- _col1
-
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string:int
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+ string
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+ int
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ boolean
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+ a
+
+
+
+
+
+
+
+
+
+ b
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ab
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _c1
+
+
+ _col1
+
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/groupby1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy)
@@ -1,1466 +1,1470 @@
-
-#### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- Stage-2
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
-
-
- HIVE_DEFAULT_LIST_BUCKETING_KEY
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.dest1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct dest1 { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
- _col0
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
- _col1
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- _col0
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- double
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- RS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
-
-
- sum
-
-
-
- PARTIAL1
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 5
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSubstr
-
-
- substr
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- HASH
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- GBY_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- true
-
-
- src
-
-
-
-
- bigint
-
-
-
-
-
-
- INPUT__FILE__NAME
-
-
- INPUT__FILE__NAME
-
-
- true
-
-
- src
-
-
-
-
-
-
-
- value
-
-
- value
-
-
- src
-
-
-
-
-
-
-
- key
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- true
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
- #### A masked pattern was here ####
-
-
-
- true
-
-
-
-
-
- 150
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
-
- 1
-
-
-
-
- FS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- _col1
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- KEY._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
-
-
- sum
-
-
-
- FINAL
-
-
-
-
-
-
-
- VALUE._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- MERGEPARTIAL
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- GBY_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ Stage-2
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
+
+
+ HIVE_DEFAULT_LIST_BUCKETING_KEY
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.dest1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct dest1 { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+ _col0
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col1
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ _col0
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ double
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ RS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
+
+
+ sum
+
+
+
+ PARTIAL1
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 5
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSubstr
+
+
+ substr
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ HASH
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ GBY_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ true
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+
+
+ INPUT__FILE__NAME
+
+
+ INPUT__FILE__NAME
+
+
+ true
+
+
+ src
+
+
+
+
+
+
+
+ value
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+ key
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ true
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ true
+
+
+
+
+
+ 150
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+
+ 1
+
+
+
+
+ FS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ KEY._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
+
+
+ sum
+
+
+
+ FINAL
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ MERGEPARTIAL
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ GBY_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/udf_case.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy)
@@ -1,809 +1,816 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- int:int
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- int
-
-
-
-
- int
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
- LIM_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- int
-
-
-
-
-
-
- _c1
-
-
- _col1
-
-
-
-
-
- int
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
-
- 11
-
-
-
-
-
-
-
-
-
- 12
-
-
-
-
-
-
-
-
-
- 13
-
-
-
-
-
-
-
-
-
- 14
-
-
-
-
-
-
-
-
-
- 15
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
-
-
- 5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
- string
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
- bigint
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ int:int
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+ int
+
+
+
+
+ int
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+ LIM_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ int
+
+
+
+
+
+
+ _c1
+
+
+ _col1
+
+
+
+
+
+ int
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+ 11
+
+
+
+
+
+
+
+
+
+ 12
+
+
+
+
+
+
+
+
+
+ 13
+
+
+
+
+
+
+
+
+
+ 14
+
+
+
+
+
+
+
+
+
+ 15
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+ 3
+
+
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+
+ 5
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+ string
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/groupby4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/groupby4.q.xml (working copy)
@@ -1,1135 +1,1139 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
- _col0
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
-
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
-
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- RS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 1
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSubstr
-
-
- substr
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- HASH
-
-
-
-
-
- _col0
-
-
-
-
-
-
- GBY_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- true
-
-
- src
-
-
-
-
- bigint
-
-
-
-
-
-
- INPUT__FILE__NAME
-
-
- INPUT__FILE__NAME
-
-
- true
-
-
- src
-
-
-
-
-
-
-
- value
-
-
- value
-
-
- src
-
-
-
-
-
-
-
- key
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
-
-
- true
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
- 0
-
-
-
-
-
-
- key
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- KEY._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- MERGEPARTIAL
-
-
-
-
-
- _col0
-
-
-
-
-
-
- GBY_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+ _col0
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ RS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSubstr
+
+
+ substr
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ HASH
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ GBY_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ true
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+
+
+ INPUT__FILE__NAME
+
+
+ INPUT__FILE__NAME
+
+
+ true
+
+
+ src
+
+
+
+
+
+
+
+ value
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+ key
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+
+
+ true
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+ 0
+
+
+
+
+
+
+ key
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ KEY._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ MERGEPARTIAL
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ GBY_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/groupby5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy)
@@ -1,1345 +1,1349 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
- _col0
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
- _col1
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- _col0
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
- double
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- RS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
-
-
-
- string
-
-
-
-
-
-
- VALUE._col0
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
-
-
- sum
-
-
-
- PARTIAL1
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 5
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSubstr
-
-
- substr
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- HASH
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- GBY_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- true
-
-
- src
-
-
-
-
- bigint
-
-
-
-
-
-
- INPUT__FILE__NAME
-
-
- INPUT__FILE__NAME
-
-
- true
-
-
- src
-
-
-
-
-
-
-
- value
-
-
- value
-
-
- src
-
-
-
-
-
-
-
- key
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- true
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
- 0
-
-
- 1
-
-
-
-
-
-
- key
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0,_col1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string:double
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col1
-
-
- _col1
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- key
-
-
- _col0
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
- _c1
-
-
- _col1
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- KEY._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
-
-
- sum
-
-
-
- FINAL
-
-
-
-
-
-
-
- VALUE._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- MERGEPARTIAL
-
-
-
-
-
- _col0
-
-
- _col1
-
-
-
-
-
-
- GBY_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
- _col1
-
-
-
-
-
-
-
-
- double
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+ _col0
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+ _col1
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ _col0
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+ double
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ RS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+ VALUE._col0
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
+
+
+ sum
+
+
+
+ PARTIAL1
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 5
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSubstr
+
+
+ substr
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ HASH
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ GBY_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ true
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+
+
+ INPUT__FILE__NAME
+
+
+ INPUT__FILE__NAME
+
+
+ true
+
+
+ src
+
+
+
+
+
+
+
+ value
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+ key
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ true
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+ 0
+
+
+ 1
+
+
+
+
+
+
+ key
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0,_col1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string:double
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col1
+
+
+ _col1
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ key
+
+
+ _col0
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _c1
+
+
+ _col1
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ KEY._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum$GenericUDAFSumDouble
+
+
+ sum
+
+
+
+ FINAL
+
+
+
+
+
+
+
+ VALUE._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ MERGEPARTIAL
+
+
+
+
+
+ _col0
+
+
+ _col1
+
+
+
+
+
+
+ GBY_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+ _col1
+
+
+
+
+
+
+
+
+ double
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/groupby6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy)
@@ -1,1135 +1,1139 @@
-
-#### A masked pattern was here ####
-
-
- Stage-1
-
-
-
-
- true
-
-
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
- _col0
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
-
-
- serialization.sort.order
- +
-
-
- columns.types
- string
-
-
-
-
-
-
- 1
-
-
- -1
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
- org.apache.hadoop.mapred.SequenceFileInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
-
-
-
- columns
-
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-
- columns.types
-
-
-
- escape.delim
- \
-
-
-
-
-
-
-
-
- RS_3
-
-
-
-
-
-
-
-
-
-
-
-
-
- KEY._col0
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- int
-
-
-
-
- 5
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
-
- org.apache.hadoop.hive.ql.udf.UDFSubstr
-
-
- substr
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- HASH
-
-
-
-
-
- _col0
-
-
-
-
-
-
- GBY_2
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- true
-
-
- src
-
-
-
-
- bigint
-
-
-
-
-
-
- INPUT__FILE__NAME
-
-
- INPUT__FILE__NAME
-
-
- true
-
-
- src
-
-
-
-
-
-
-
- value
-
-
- value
-
-
- src
-
-
-
-
-
-
-
- key
-
-
- key
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
-
-
- true
-
-
-
-
- SEL_1
-
-
-
-
-
-
-
-
-
-
-
-
-
- value
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- value
-
-
-
-
- TS_0
-
-
-
-
-
-
-
-
- key
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
- true
-
-
- BLOCK__OFFSET__INSIDE__FILE
-
-
- src
-
-
-
-
-
- bigint
-
-
-
-
-
-
- true
-
-
- INPUT__FILE__NAME
-
-
- src
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
- src
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- name
- default.src
-
-
- numFiles
- 1
-
-
- columns.types
- string:string
-
-
- serialization.ddl
- struct src { string key, string value}
-
-
- serialization.format
- 1
-
-
- columns
- key,value
-
-
- rawDataSize
- 0
-
-
- numRows
- 0
-
-
- bucket_count
- -1
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- COLUMN_STATS_ACCURATE
- true
-
-
- file.inputformat
- org.apache.hadoop.mapred.TextInputFormat
-
-
- totalSize
- 5812
-
-
- file.outputformat
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
- location
- #### A masked pattern was here ####
-
-
- transient_lastDdlTime
- #### A masked pattern was here ####
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- -1
-
-
-
-
-
-
-
-
-
-
-
-
-
- #### A masked pattern was here ####
-
-
-
- 1
-
-
- #### A masked pattern was here ####
-
-
- true
-
-
-
-
- org.apache.hadoop.mapred.TextInputFormat
-
-
- org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
-
-
-
- hive.serialization.extend.nesting.levels
- true
-
-
- columns
- _col0
-
-
- serialization.lib
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-
- serialization.format
- 1
-
-
- columns.types
- string
-
-
- escape.delim
- \
-
-
-
-
-
-
- 1
-
-
-
-
- FS_6
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- _col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
- SEL_5
-
-
-
-
-
-
-
-
-
-
-
-
-
- _c0
-
-
- _col0
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
- KEY._col0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.5
-
-
-
-
-
-
-
-
-
-
-
-
- 0.9
-
-
-
- MERGEPARTIAL
-
-
-
-
-
- _col0
-
-
-
-
-
-
- GBY_4
-
-
-
-
-
-
-
-
-
-
-
-
-
- _col0
-
-
-
-
-
-
-
-
- string
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+#### A masked pattern was here ####
+
+
+ Stage-1
+
+
+
+
+ true
+
+
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+ _col0
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe
+
+
+ serialization.sort.order
+ +
+
+
+ columns.types
+ string
+
+
+
+
+
+
+ 1
+
+
+ -1
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+ org.apache.hadoop.mapred.SequenceFileInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+
+
+
+ columns
+
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+
+ columns.types
+
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+
+
+ RS_3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ KEY._col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ int
+
+
+
+
+ 5
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+
+
+ org.apache.hadoop.hive.ql.udf.UDFSubstr
+
+
+ substr
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ HASH
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ GBY_2
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ true
+
+
+ src
+
+
+
+
+ bigint
+
+
+
+
+
+
+ INPUT__FILE__NAME
+
+
+ INPUT__FILE__NAME
+
+
+ true
+
+
+ src
+
+
+
+
+
+
+
+ value
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+ key
+
+
+ key
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+
+
+ true
+
+
+
+
+ SEL_1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ value
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
+ 1
+
+
+
+
+
+
+ value
+
+
+
+
+ TS_0
+
+
+
+
+
+
+
+
+ key
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+ true
+
+
+ BLOCK__OFFSET__INSIDE__FILE
+
+
+ src
+
+
+
+
+
+ bigint
+
+
+
+
+
+
+ true
+
+
+ INPUT__FILE__NAME
+
+
+ src
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+ src
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ name
+ default.src
+
+
+ numFiles
+ 1
+
+
+ columns.types
+ string:string
+
+
+ serialization.ddl
+ struct src { string key, string value}
+
+
+ serialization.format
+ 1
+
+
+ columns
+ key,value
+
+
+ rawDataSize
+ 0
+
+
+ numRows
+ 0
+
+
+ bucket_count
+ -1
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ COLUMN_STATS_ACCURATE
+ true
+
+
+ file.inputformat
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ totalSize
+ 5812
+
+
+ file.outputformat
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+ location
+ #### A masked pattern was here ####
+
+
+ transient_lastDdlTime
+ #### A masked pattern was here ####
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #### A masked pattern was here ####
+
+
+
+ 1
+
+
+ #### A masked pattern was here ####
+
+
+ true
+
+
+
+
+ org.apache.hadoop.mapred.TextInputFormat
+
+
+ org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+
+
+
+ hive.serialization.extend.nesting.levels
+ true
+
+
+ columns
+ _col0
+
+
+ serialization.lib
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+
+ serialization.format
+ 1
+
+
+ columns.types
+ string
+
+
+ escape.delim
+ \
+
+
+
+
+
+
+ 1
+
+
+
+
+ FS_6
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ _col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ SEL_5
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _c0
+
+
+ _col0
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+ KEY._col0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.5
+
+
+
+
+
+
+
+
+
+
+
+
+ 0.9
+
+
+
+ MERGEPARTIAL
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+ GBY_4
+
+
+
+
+
+
+
+
+
+
+
+
+
+ _col0
+
+
+
+
+
+
+
+
+ string
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: ql/src/test/results/compiler/plan/udf_when.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1558643)
+++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy)
@@ -1,889 +1,896 @@
- [old udf_when.q.xml plan serialization: the XML element markup was stripped during extraction and cannot be reconstructed; the surviving values describe a TS_0 -> SEL_1 -> LIM_2 -> FS_3 operator chain over default.src (TextInputFormat / HiveIgnoreKeyTextOutputFormat, LazySimpleSerDe) producing output columns _col0,_col1 of types int:int, with the CASE/WHEN constant operands serialized in the SEL_1 expression tree]
+ [new udf_when.q.xml plan serialization: likewise stripped of markup; the surviving values show the same operator chain and output columns, differing from the old plan only in the serialized CASE/WHEN constant expressions (the hunk header records 889 lines replaced by 896)]
Index: ql/src/test/results/clientpositive/input18.q.out
===================================================================
--- ql/src/test/results/clientpositive/input18.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input18.q.out (working copy)
@@ -42,9 +42,9 @@
type: string
expr: value
type: string
- expr: (1 + 2)
+ expr: 3
type: int
- expr: (3 + 4)
+ expr: 7
type: int
outputColumnNames: _col0, _col1, _col2, _col3
Transform Operator
Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy)
@@ -24,22 +24,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '10'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '10'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '10'
type: string
expr: _col1
type: string
@@ -93,16 +91,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -114,45 +110,34 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
- value expressions:
- expr: key
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0}
+ 0 {VALUE._col1}
+ 1
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4
- Filter Operator
- predicate:
- expr: (_col0 = 20)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col4
- type: string
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ outputColumnNames: _col1
+ Select Operator
+ expressions:
+ expr: _col1
+ type: string
+ outputColumnNames: _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -169,11 +154,11 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
Reduce Operator Tree:
Extract
@@ -224,22 +209,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '10'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '10'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '10'
type: string
expr: _col1
type: string
@@ -293,16 +276,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -314,34 +295,27 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
- value expressions:
- expr: key
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0}
+ 0 {VALUE._col1}
+ 1
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
- type: string
- outputColumnNames: _col0, _col1, _col2
+ outputColumnNames: _col1
File Output Operator
compressed: false
GlobalTableId: 0
@@ -365,11 +339,11 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
Reduce Operator Tree:
Extract
Index: ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
===================================================================
--- ql/src/test/results/clientpositive/vectorized_math_funcs.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/vectorized_math_funcs.q.out (working copy)
@@ -122,7 +122,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (((cbigint % 500) = 0) and (sin(cfloat) >= (- 1.0)))
+ expr: (((cbigint % 500) = 0) and (sin(cfloat) >= -1.0))
type: boolean
Vectorized execution: true
Select Operator
Index: ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (working copy)
@@ -45,21 +45,17 @@
Processor Tree:
TableScan
alias: src
- Filter Operator
- predicate:
- expr: true is not null
- type: boolean
- Select Operator
- expressions:
- expr: null is null
- type: boolean
- expr: 1 is not null
- type: boolean
- expr: 'my string' is not null
- type: boolean
- outputColumnNames: _col0, _col1, _col2
- Limit
- ListSink
+ Select Operator
+ expressions:
+ expr: null is null
+ type: boolean
+ expr: true
+ type: boolean
+ expr: true
+ type: boolean
+ outputColumnNames: _col0, _col1, _col2
+ Limit
+ ListSink
PREHOOK: query: SELECT NULL IS NULL,
1 IS NOT NULL,
Index: ql/src/test/results/clientpositive/udf_repeat.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_repeat.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_repeat.q.out (working copy)
@@ -41,13 +41,13 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: repeat('Facebook', 3)
+ expr: 'FacebookFacebookFacebook'
type: string
- expr: repeat('', 4)
+ expr: ''
type: string
- expr: repeat('asd', 0)
+ expr: ''
type: string
- expr: repeat('asdf', (- 1))
+ expr: ''
type: string
outputColumnNames: _col0, _col1, _col2, _col3
ListSink
Index: ql/src/test/results/clientpositive/udf_reflect2.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_reflect2.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_reflect2.q.out (working copy)
@@ -101,7 +101,7 @@
type: int
expr: value
type: string
- expr: CAST( '2013-02-15 19:41:20' AS TIMESTAMP)
+ expr: 2013-02-15 19:41:20.0
type: timestamp
outputColumnNames: _col0, _col1, _col2
Statistics:
Index: ql/src/test/results/clientpositive/constprog1.q.out
===================================================================
--- ql/src/test/results/clientpositive/constprog1.q.out (revision 0)
+++ ql/src/test/results/clientpositive/constprog1.q.out (revision 0)
@@ -0,0 +1,40 @@
+PREHOOK: query: EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF (> (TOK_FUNCTION INSTR (TOK_FUNCTION CONCAT 'foo' 'bar') 'foob') 0) "F1" "B1")))))
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: src
+ Row Limit Per Split: 1
+ Select Operator
+ expressions:
+ expr: 'F1'
+ type: string
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+F1
Index: ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out
===================================================================
--- ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out (working copy)
@@ -3640,7 +3640,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -3658,7 +3658,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -3676,7 +3676,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -3817,7 +3817,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -3835,7 +3835,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -3853,7 +3853,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -3994,7 +3994,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -4012,7 +4012,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -4030,7 +4030,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -4171,7 +4171,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -4189,7 +4189,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -4207,7 +4207,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_repeated_alias.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_repeated_alias.q.out (working copy)
@@ -92,7 +92,7 @@
type: int
expr: _col5
type: int
- expr: _col6
+ expr: 3
type: int
outputColumnNames: _col0, _col1, _col2
File Output Operator
@@ -246,8 +246,6 @@
value expressions:
expr: foo
type: int
- expr: bar
- type: int
a:b
TableScan
alias: b
@@ -268,17 +266,17 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col0}
1 {VALUE._col0}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col5
+ outputColumnNames: _col0, _col5
Select Operator
expressions:
expr: _col0
type: int
expr: _col5
type: int
- expr: _col1
+ expr: 3
type: int
outputColumnNames: _col0, _col1, _col2
File Output Operator
Index: ql/src/test/results/clientpositive/udf_case.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_case.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_case.q.out (working copy)
@@ -80,17 +80,17 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END
+ expr: 2
type: int
- expr: CASE (2) WHEN (1) THEN (2) ELSE (5) END
+ expr: 5
type: int
- expr: CASE (14) WHEN (12) THEN (13) WHEN (14) THEN (15) END
+ expr: 15
type: int
- expr: CASE (16) WHEN (12) THEN (13) WHEN (14) THEN (15) END
- type: int
+ expr: null
+ type: void
expr: CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END
type: int
- expr: CASE (21) WHEN (22) THEN (23) WHEN (21) THEN (24) END
+ expr: 24
type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
ListSink
Index: ql/src/test/results/clientpositive/query_result_fileformat.q.out
===================================================================
--- ql/src/test/results/clientpositive/query_result_fileformat.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/query_result_fileformat.q.out (working copy)
@@ -58,7 +58,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 'key1'
type: string
expr: value
type: string
@@ -134,7 +134,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 'key1'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/set_processor_namespaces.q.out
===================================================================
--- ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out (working copy)
@@ -27,7 +27,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '5'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/smb_mapjoin_18.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (working copy)
@@ -289,19 +289,33 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: int
expr: value
type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.test_table2
+ outputColumnNames: _col1
+ Reduce Output Operator
+ key expressions:
+ expr: 238
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: 238
+ type: int
+ tag: -1
+ value expressions:
+ expr: 238
+ type: int
+ expr: _col1
+ type: string
+ Reduce Operator Tree:
+ Extract
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test_table2
Stage: Stage-0
Move Operator
Index: ql/src/test/results/clientpositive/literal_double.q.out
===================================================================
--- ql/src/test/results/clientpositive/literal_double.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/literal_double.q.out (working copy)
@@ -19,15 +19,15 @@
expressions:
expr: 3.14
type: double
- expr: (- 3.14)
+ expr: -3.14
type: double
expr: 3.14E8
type: double
expr: 3.14E-8
type: double
- expr: (- 3.14E8)
+ expr: -3.14E8
type: double
- expr: (- 3.14E-8)
+ expr: -3.14E-8
type: double
expr: 3.14E8
type: double
Index: ql/src/test/results/clientpositive/udf_nvl.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_nvl.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_nvl.q.out (working copy)
@@ -37,7 +37,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: if 1 is null returns2
+ expr: 1
type: int
expr: if null is null returns5
type: int
Index: ql/src/test/results/clientpositive/index_stale.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_stale.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_stale.q.out (working copy)
@@ -92,7 +92,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: val
type: string
Index: ql/src/test/results/clientpositive/smb_mapjoin9.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin9.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out (working copy)
@@ -51,12 +51,12 @@
Inner Join 0 to 1
condition expressions:
0 {key}
- 1 {key} {value} {ds}
+ 1 {key} {value}
handleSkewJoin: false
keys:
0 [Column[key]]
1 [Column[key]]
- outputColumnNames: _col0, _col5, _col6, _col7
+ outputColumnNames: _col0, _col5, _col6
Position of Big Table: 0
Select Operator
expressions:
@@ -64,7 +64,7 @@
type: int
expr: _col6
type: string
- expr: _col7
+ expr: '2010-10-15'
type: string
expr: _col0
type: int
@@ -151,12 +151,12 @@
Inner Join 0 to 1
condition expressions:
0 {key}
- 1 {key} {value} {ds}
+ 1 {key} {value}
handleSkewJoin: false
keys:
0 [Column[key]]
1 [Column[key]]
- outputColumnNames: _col0, _col5, _col6, _col7
+ outputColumnNames: _col0, _col5, _col6
Position of Big Table: 1
Select Operator
expressions:
@@ -164,7 +164,7 @@
type: int
expr: _col6
type: string
- expr: _col7
+ expr: '2010-10-15'
type: string
expr: _col0
type: int
Index: ql/src/test/results/clientpositive/quote1.q.out
===================================================================
--- ql/src/test/results/clientpositive/quote1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/quote1.q.out (working copy)
@@ -139,7 +139,7 @@
type: int
expr: type
type: string
- expr: table
+ expr: '2008-04-08'
type: string
outputColumnNames: _col0, _col1, _col2
File Output Operator
Index: ql/src/test/results/clientpositive/type_cast_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/type_cast_1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/type_cast_1.q.out (working copy)
@@ -19,7 +19,7 @@
alias: src
Select Operator
expressions:
- expr: (if(false, 1, UDFToShort(2)) + 3)
+ expr: 5
type: int
outputColumnNames: _col0
Limit
Index: ql/src/test/results/clientpositive/ppd2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd2.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd2.q.out (working copy)
@@ -371,16 +371,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -392,34 +390,27 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
- value expressions:
- expr: key
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0}
+ 0 {VALUE._col1}
+ 1
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
- type: string
- outputColumnNames: _col0, _col1, _col2
+ outputColumnNames: _col1
File Output Operator
compressed: false
GlobalTableId: 0
@@ -443,11 +434,11 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
Reduce Operator Tree:
Extract
Index: ql/src/test/results/clientpositive/num_op_type_conv.q.out
===================================================================
--- ql/src/test/results/clientpositive/num_op_type_conv.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/num_op_type_conv.q.out (working copy)
@@ -29,11 +29,11 @@
type: double
expr: (null + null)
type: double
- expr: (UDFToLong(21) % UDFToByte(5))
+ expr: 1
type: bigint
- expr: (UDFToLong(21) % UDFToLong(21))
+ expr: 0
type: bigint
- expr: (9 % '3')
+ expr: 0.0
type: double
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Limit
Index: ql/src/test/results/clientpositive/insert1.q.out
===================================================================
--- ql/src/test/results/clientpositive/insert1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/insert1.q.out (working copy)
@@ -46,11 +46,11 @@
alias: a
Filter Operator
predicate:
- expr: (key = (- 1))
+ expr: (key = -1)
type: boolean
Select Operator
expressions:
- expr: key
+ expr: -1
type: int
expr: value
type: string
@@ -148,11 +148,11 @@
alias: a
Filter Operator
predicate:
- expr: (key = (- 1))
+ expr: (key = -1)
type: boolean
Select Operator
expressions:
- expr: key
+ expr: -1
type: int
expr: value
type: string
@@ -265,11 +265,11 @@
alias: a
Filter Operator
predicate:
- expr: (key = (- 1))
+ expr: (key = -1)
type: boolean
Select Operator
expressions:
- expr: key
+ expr: -1
type: int
expr: value
type: string
@@ -367,11 +367,11 @@
alias: a
Filter Operator
predicate:
- expr: (key = (- 1))
+ expr: (key = -1)
type: boolean
Select Operator
expressions:
- expr: key
+ expr: -1
type: int
expr: value
type: string
Index: ql/src/test/results/clientpositive/sample8.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample8.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/sample8.q.out (working copy)
@@ -299,11 +299,7 @@
type: string
expr: _col1
type: string
- expr: _col2
- type: string
- expr: _col3
- type: string
- outputColumnNames: _col0, _col1, _col2, _col3
+ outputColumnNames: _col0, _col1
Statistics:
numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
File Output Operator
@@ -315,8 +311,8 @@
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3
- columns.types string,string,string,string
+ columns _col0,_col1
+ columns.types string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -350,9 +346,9 @@
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '2008-04-08'
type: string
- expr: _col3
+ expr: '11'
type: string
Path -> Alias:
#### A masked pattern was here ####
@@ -363,8 +359,8 @@
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3
- columns.types string,string,string,string
+ columns _col0,_col1
+ columns.types string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -372,8 +368,8 @@
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
- columns _col0,_col1,_col2,_col3
- columns.types string,string,string,string
+ columns _col0,_col1
+ columns.types string,string
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Index: ql/src/test/results/clientpositive/index_stale_partitioned.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_stale_partitioned.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_stale_partitioned.q.out (working copy)
@@ -120,7 +120,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: val
type: string
Index: ql/src/test/results/clientpositive/udf_PI.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_PI.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_PI.q.out (working copy)
@@ -20,7 +20,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: pi()
+ expr: 3.141592653589793
type: double
outputColumnNames: _col0
ListSink
@@ -70,7 +70,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: pi()
+ expr: 3.141592653589793
type: double
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/quotedid_basic.q.out
===================================================================
--- ql/src/test/results/clientpositive/quotedid_basic.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/quotedid_basic.q.out (working copy)
@@ -87,7 +87,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
+ expr: '1'
type: string
outputColumnNames: _col0, _col1, _col2
File Output Operator
@@ -130,9 +130,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
- type: string
- outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ outputColumnNames: x+1, y&y
Group By Operator
bucketGroup: false
keys:
@@ -140,7 +138,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
+ expr: '1'
type: string
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -225,9 +223,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
- type: string
- outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ outputColumnNames: x+1, y&y
Group By Operator
bucketGroup: false
keys:
@@ -235,7 +231,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
+ expr: '1'
type: string
mode: hash
outputColumnNames: _col0, _col1, _col2
@@ -358,9 +354,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
- type: string
- outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ outputColumnNames: x+1, y&y
Group By Operator
bucketGroup: false
keys:
@@ -368,7 +362,7 @@
type: string
expr: y&y
type: string
- expr: !@#$%^&*()_q
+ expr: '1'
type: string
mode: hash
outputColumnNames: _col0, _col1, _col2
Index: ql/src/test/results/clientpositive/input38.q.out
===================================================================
--- ql/src/test/results/clientpositive/input38.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input38.q.out (working copy)
@@ -45,9 +45,9 @@
type: string
expr: value
type: string
- expr: (1 + 2)
+ expr: 3
type: int
- expr: (3 + 4)
+ expr: 7
type: int
outputColumnNames: _col0, _col1, _col2, _col3
Transform Operator
Index: ql/src/test/results/clientpositive/subquery_notin_having.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_notin_having.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_notin_having.q.out (working copy)
@@ -209,7 +209,7 @@
outputColumnNames: _col0, _col1, _col4
Filter Operator
predicate:
- expr: ((1 = 1) and _col4 is null)
+ expr: _col4 is null
type: boolean
Select Operator
expressions:
@@ -464,7 +464,7 @@
outputColumnNames: _col0, _col1, _col7
Filter Operator
predicate:
- expr: ((1 = 1) and _col7 is null)
+ expr: _col7 is null
type: boolean
Select Operator
expressions:
@@ -865,7 +865,7 @@
outputColumnNames: _col0, _col1, _col5
Filter Operator
predicate:
- expr: ((1 = 1) and _col5 is null)
+ expr: _col5 is null
type: boolean
Select Operator
expressions:
@@ -894,19 +894,17 @@
type: boolean
Select Operator
expressions:
- expr: p_mfgr
- type: string
expr: p_retailprice
type: double
- outputColumnNames: p_mfgr, p_retailprice
+ outputColumnNames: p_retailprice
Group By Operator
aggregations:
expr: max(p_retailprice)
expr: min(p_retailprice)
bucketGroup: false
keys:
- expr: p_mfgr
- type: string
+ expr: null
+ type: void
mode: hash
outputColumnNames: _col0, _col1, _col2
Reduce Output Operator
Index: ql/src/test/results/clientpositive/udf_instr.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_instr.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_instr.q.out (working copy)
@@ -59,27 +59,27 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: instr('abcd''abc')
+ expr: 1
type: int
- expr: instr('abcabc''ccc')
+ expr: 0
type: int
- expr: instr(123'23')
+ expr: 2
type: int
- expr: instr(12323)
+ expr: 2
type: int
- expr: instr(true1)
+ expr: 0
type: int
- expr: instr(false1)
+ expr: 0
type: int
- expr: instr('12345'UDFToByte('2'))
+ expr: 2
type: int
- expr: instr(UDFToShort('12345')'34')
+ expr: 3
type: int
- expr: instr(UDFToLong('123456789012')'456')
+ expr: 4
type: int
- expr: instr(UDFToFloat(1.25)'.25')
+ expr: 2
type: int
- expr: instr(16.0'.0')
+ expr: 3
type: int
expr: instr(null'abc')
type: int
Index: ql/src/test/results/clientpositive/multi_insert.q.out
===================================================================
--- ql/src/test/results/clientpositive/multi_insert.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/multi_insert.q.out (working copy)
@@ -3591,7 +3591,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -3609,7 +3609,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -3627,7 +3627,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -3764,7 +3764,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -3782,7 +3782,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -3800,7 +3800,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -3937,7 +3937,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -3955,7 +3955,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -3973,7 +3973,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
@@ -4110,7 +4110,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -4128,7 +4128,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '2'
type: string
expr: value
type: string
@@ -4146,7 +4146,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '4'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part2.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -63,7 +63,7 @@
type: string
expr: hr
type: string
- expr: ds
+ expr: '2008-04-08'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
Statistics:
@@ -109,7 +109,7 @@
type: string
expr: hr
type: string
- expr: ds
+ expr: '2008-04-09'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
Statistics:
Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy)
@@ -105,19 +105,15 @@
type: string
expr: _col1
type: string
- expr: _col2
- type: string
expr: _col3
type: string
expr: _col6
type: string
expr: _col7
type: string
- expr: _col8
- type: string
expr: _col9
type: string
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+ outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col7
File Output Operator
compressed: false
GlobalTableId: 0
@@ -137,7 +133,7 @@
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '2008-04-08'
type: string
expr: _col3
type: string
@@ -145,7 +141,7 @@
type: string
expr: _col5
type: string
- expr: _col6
+ expr: '2008-04-08'
type: string
expr: _col7
type: string
@@ -156,7 +152,7 @@
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '2008-04-08'
type: string
expr: _col3
type: string
@@ -164,7 +160,7 @@
type: string
expr: _col5
type: string
- expr: _col6
+ expr: '2008-04-08'
type: string
expr: _col7
type: string
Index: ql/src/test/results/clientpositive/join_nullsafe.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_nullsafe.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/join_nullsafe.q.out (working copy)
@@ -1632,16 +1632,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
- type: int
+ expr: null
+ type: void
sort order: +
Map-reduce partition columns:
- expr: key
- type: int
+ expr: null
+ type: void
tag: 0
value expressions:
- expr: key
- type: int
expr: value
type: int
b
@@ -1653,38 +1651,36 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: value
- type: int
+ expr: null
+ type: void
sort order: +
Map-reduce partition columns:
- expr: value
- type: int
+ expr: null
+ type: void
tag: 1
value expressions:
expr: key
type: int
- expr: value
- type: int
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col1}
+ 1 {VALUE._col0}
handleSkewJoin: false
nullSafes: [true]
- outputColumnNames: _col0, _col1, _col4, _col5
+ outputColumnNames: _col1, _col4
Select Operator
expressions:
- expr: _col0
- type: int
+ expr: null
+ type: void
expr: _col1
type: int
expr: _col4
type: int
- expr: _col5
- type: int
+ expr: null
+ type: void
outputColumnNames: _col0, _col1, _col2, _col3
File Output Operator
compressed: false
Index: ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
===================================================================
--- ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out (working copy)
@@ -1303,7 +1303,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 1
type: int
expr: _count_of_key
type: bigint
@@ -1311,9 +1311,9 @@
Group By Operator
aggregations:
expr: sum(_count_of_key)
- bucketGroup: true
+ bucketGroup: false
keys:
- expr: key
+ expr: 1
type: int
mode: hash
outputColumnNames: _col0, _col1
@@ -1756,14 +1756,10 @@
expr: (key = 3)
type: boolean
Select Operator
- expressions:
- expr: key
- type: int
- outputColumnNames: key
Group By Operator
bucketGroup: false
keys:
- expr: key
+ expr: 3
type: int
mode: hash
outputColumnNames: _col0
@@ -2075,15 +2071,13 @@
expressions:
expr: key
type: int
- expr: value
- type: int
- outputColumnNames: key, value
+ outputColumnNames: key
Group By Operator
bucketGroup: false
keys:
expr: key
type: int
- expr: value
+ expr: 1
type: int
mode: hash
outputColumnNames: _col0, _col1
@@ -2463,15 +2457,13 @@
expressions:
expr: key
type: int
- expr: value
- type: int
- outputColumnNames: key, value
+ outputColumnNames: key
Group By Operator
bucketGroup: false
keys:
expr: key
type: int
- expr: value
+ expr: 2
type: int
mode: hash
outputColumnNames: _col0, _col1
@@ -2548,18 +2540,12 @@
expr: ((value = 2) and (key = 3))
type: boolean
Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: int
- outputColumnNames: key, value
Group By Operator
bucketGroup: false
keys:
- expr: key
+ expr: 3
type: int
- expr: value
+ expr: 2
type: int
mode: hash
outputColumnNames: _col0, _col1
@@ -2899,15 +2885,13 @@
expressions:
expr: key
type: int
- expr: value
- type: int
- outputColumnNames: key, value
+ outputColumnNames: key
Group By Operator
bucketGroup: false
keys:
expr: key
type: int
- expr: value
+ expr: 2
type: int
mode: hash
outputColumnNames: _col0, _col1
Index: ql/src/test/results/clientpositive/union33.q.out
===================================================================
--- ql/src/test/results/clientpositive/union33.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/union33.q.out (working copy)
@@ -165,7 +165,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
@@ -436,7 +436,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '0'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/udf_radians.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_radians.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_radians.q.out (working copy)
@@ -20,7 +20,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: radians(57.2958)
+ expr: 1.000000357564167
type: double
outputColumnNames: _col0
ListSink
@@ -79,7 +79,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: radians(57.2958)
+ expr: 1.000000357564167
type: double
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/udf_10_trims.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy)
@@ -41,7 +41,7 @@
type: boolean
Select Operator
expressions:
- expr: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc '))))))))))
+ expr: 'abc'
type: string
outputColumnNames: _col0
File Output Operator
Index: ql/src/test/results/clientpositive/udf_sign.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_sign.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_sign.q.out (working copy)
@@ -20,7 +20,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: sign(0)
+ expr: 0.0
type: double
outputColumnNames: _col0
ListSink
@@ -87,7 +87,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: sign(0)
+ expr: 0.0
type: double
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/subq_where_serialization.q.out
===================================================================
--- ql/src/test/results/clientpositive/subq_where_serialization.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subq_where_serialization.q.out (working copy)
@@ -109,22 +109,18 @@
1 [Column[_col0]]
outputColumnNames: _col0
Position of Big Table: 0
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Local Work:
Map Reduce Local Work
@@ -166,22 +162,18 @@
1
handleSkewJoin: false
outputColumnNames: _col0
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Index: ql/src/test/results/clientpositive/udf5.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf5.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf5.q.out (working copy)
@@ -35,21 +35,21 @@
alias: dest1
Select Operator
expressions:
- expr: from_unixtime(1226446340)
+ expr: '2008-11-11 15:32:20'
type: string
- expr: to_date(from_unixtime(1226446340))
+ expr: '2008-11-11'
type: string
- expr: day('2008-11-01')
+ expr: 1
type: int
- expr: month('2008-11-01')
+ expr: 11
type: int
- expr: year('2008-11-01')
+ expr: 2008
type: int
- expr: day('2008-11-01 15:32:20')
+ expr: 1
type: int
- expr: month('2008-11-01 15:32:20')
+ expr: 11
type: int
- expr: year('2008-11-01 15:32:20')
+ expr: 2008
type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
File Output Operator
Index: ql/src/test/results/clientpositive/subquery_notin.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_notin.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_notin.q.out (working copy)
@@ -256,7 +256,7 @@
outputColumnNames: _col0, _col1, _col4
Filter Operator
predicate:
- expr: ((1 = 1) and _col4 is null)
+ expr: _col4 is null
type: boolean
Select Operator
expressions:
@@ -542,7 +542,7 @@
outputColumnNames: _col1, _col2, _col5, _col11
Filter Operator
predicate:
- expr: ((1 = 1) and _col11 is null)
+ expr: _col11 is null
type: boolean
Select Operator
expressions:
@@ -885,7 +885,7 @@
outputColumnNames: _col1, _col5, _col11
Filter Operator
predicate:
- expr: ((1 = 1) and _col11 is null)
+ expr: _col11 is null
type: boolean
Select Operator
expressions:
@@ -1270,7 +1270,7 @@
outputColumnNames: _col1, _col2, _col5, _col11
Filter Operator
predicate:
- expr: ((1 = 1) and _col11 is null)
+ expr: _col11 is null
type: boolean
Select Operator
expressions:
@@ -1875,7 +1875,7 @@
outputColumnNames: _col0, _col1
Filter Operator
predicate:
- expr: ((1 = 1) and _col1 is null)
+ expr: _col1 is null
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/join_view.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_view.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/join_view.q.out (working copy)
@@ -49,11 +49,11 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: ds
+ expr: '2011-09-01'
type: string
sort order: +
Map-reduce partition columns:
- expr: ds
+ expr: '2011-09-01'
type: string
tag: 0
value expressions:
@@ -68,27 +68,25 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: ds
+ expr: '2011-09-01'
type: string
sort order: +
Map-reduce partition columns:
- expr: ds
+ expr: '2011-09-01'
type: string
tag: 1
value expressions:
expr: foo
type: int
- expr: ds
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col2}
+ 1 {VALUE._col0}
handleSkewJoin: false
- outputColumnNames: _col1, _col5, _col7
+ outputColumnNames: _col1, _col5
Select Operator
expressions:
expr: _col1
Index: ql/src/test/results/clientpositive/subquery_notexists_having.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_notexists_having.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_notexists_having.q.out (working copy)
@@ -146,7 +146,7 @@
outputColumnNames: _col0, _col1, _col8
Filter Operator
predicate:
- expr: ((1 = 1) and _col8 is null)
+ expr: _col8 is null
type: boolean
Select Operator
expressions:
@@ -332,7 +332,7 @@
outputColumnNames: _col0, _col1, _col6
Filter Operator
predicate:
- expr: ((1 = 1) and _col6 is null)
+ expr: _col6 is null
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/udf_abs.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_abs.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_abs.q.out (working copy)
@@ -45,15 +45,15 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: abs(0)
+ expr: 0
type: int
- expr: abs((- 1))
+ expr: 1
type: int
- expr: abs(123)
+ expr: 123
type: int
- expr: abs((- 9223372036854775807))
+ expr: 9223372036854775807
type: bigint
- expr: abs(9223372036854775807)
+ expr: 9223372036854775807
type: bigint
outputColumnNames: _col0, _col1, _col2, _col3, _col4
ListSink
@@ -107,11 +107,11 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: abs(0.0)
+ expr: 0.0
type: double
- expr: abs((- 3.14159265))
+ expr: 3.14159265
type: double
- expr: abs(3.14159265)
+ expr: 3.14159265
type: double
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/input26.q.out
===================================================================
--- ql/src/test/results/clientpositive/input26.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input26.q.out (working copy)
@@ -127,9 +127,9 @@
type: string
expr: value
type: string
- expr: ds
+ expr: '2008-04-08'
type: string
- expr: hr
+ expr: '14'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
Limit
@@ -141,9 +141,9 @@
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '2008-04-08'
type: string
- expr: _col3
+ expr: '14'
type: string
Reduce Operator Tree:
Extract
Index: ql/src/test/results/clientpositive/udf_E.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_E.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_E.q.out (working copy)
@@ -20,7 +20,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: e()
+ expr: 2.718281828459045
type: double
outputColumnNames: _col0
ListSink
@@ -70,7 +70,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: e()
+ expr: 2.718281828459045
type: double
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/udf_coalesce.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_coalesce.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_coalesce.q.out (working copy)
@@ -69,9 +69,9 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: COALESCE(1)
+ expr: 1
type: int
- expr: COALESCE(1,2)
+ expr: 1
type: int
expr: COALESCE(null,2)
type: int
@@ -81,9 +81,9 @@
type: int
expr: COALESCE(4,null,null,null)
type: int
- expr: COALESCE('1')
+ expr: '1'
type: string
- expr: COALESCE('1','2')
+ expr: '1'
type: string
expr: COALESCE(null,'2')
type: string
@@ -93,9 +93,9 @@
type: string
expr: COALESCE('4',null,null,null)
type: string
- expr: COALESCE(1.0)
+ expr: 1.0
type: double
- expr: COALESCE(1.0,2.0)
+ expr: 1.0
type: double
expr: COALESCE(null,2.0)
type: double
Index: ql/src/test/results/clientpositive/udf_like.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_like.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_like.q.out (working copy)
@@ -42,33 +42,33 @@
type: boolean
Select Operator
expressions:
- expr: ('_%_' like '%\_\%\_%')
+ expr: true
type: boolean
- expr: ('__' like '%\_\%\_%')
+ expr: false
type: boolean
- expr: ('%%_%_' like '%\_\%\_%')
+ expr: true
type: boolean
- expr: ('%_%_%' like '%\%\_\%')
+ expr: true
type: boolean
- expr: ('_%_' like '\%\_%')
+ expr: false
type: boolean
- expr: ('%__' like '__\%%')
+ expr: false
type: boolean
- expr: ('_%' like '\_\%\_\%%')
+ expr: false
type: boolean
- expr: ('_%' like '\_\%_%')
+ expr: false
type: boolean
- expr: ('%_' like '\%\_')
+ expr: true
type: boolean
- expr: ('ab' like '\%\_')
+ expr: false
type: boolean
- expr: ('ab' like '_a%')
+ expr: false
type: boolean
- expr: ('ab' like 'a')
+ expr: false
type: boolean
- expr: ('ab' like '')
+ expr: false
type: boolean
- expr: ('' like '')
+ expr: true
type: boolean
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
ListSink
Index: ql/src/test/results/clientpositive/ppd_constant_where.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_constant_where.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_constant_where.q.out (working copy)
@@ -20,23 +20,19 @@
srcpart
TableScan
alias: srcpart
- Filter Operator
- predicate:
- expr: ('a' = 'a')
- type: boolean
- Select Operator
- Group By Operator
- aggregations:
- expr: count()
- bucketGroup: false
- mode: hash
- outputColumnNames: _col0
- Reduce Output Operator
- sort order:
- tag: -1
- value expressions:
- expr: _col0
- type: bigint
+ Select Operator
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ mode: hash
+ outputColumnNames: _col0
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: bigint
Reduce Operator Tree:
Group By Operator
aggregations:
Index: ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out (working copy)
@@ -718,8 +718,6 @@
sort order:
tag: 1
value expressions:
- expr: p_partkey
- type: int
expr: p_name
type: string
expr: p_mfgr
@@ -742,9 +740,9 @@
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
File Output Operator
compressed: false
GlobalTableId: 0
@@ -768,8 +766,6 @@
type: string
tag: 0
value expressions:
- expr: _col11
- type: int
expr: _col12
type: string
expr: _col13
@@ -840,10 +836,10 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19}
+ 0 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19}
1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Select Operator
expressions:
expr: _col11
@@ -864,7 +860,7 @@
type: double
expr: _col19
type: string
- expr: _col0
+ expr: 1
type: int
expr: _col1
type: string
Index: ql/src/test/results/clientpositive/udf_hour.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_hour.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_hour.q.out (working copy)
@@ -41,12 +41,12 @@
type: boolean
Select Operator
expressions:
- expr: hour('2009-08-07 13:14:15')
+ expr: 13
type: int
- expr: hour('13:14:15')
+ expr: 13
type: int
- expr: hour('2009-08-07')
- type: int
+ expr: null
+ type: void
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/input_part4.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part4.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input_part4.q.out (working copy)
@@ -27,9 +27,9 @@
type: string
expr: value
type: string
- expr: ds
+ expr: '2008-04-08'
type: string
- expr: hr
+ expr: '15'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
ListSink
Index: ql/src/test/results/clientpositive/udf_rpad.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_rpad.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_rpad.q.out (working copy)
@@ -41,11 +41,11 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: rpad('hi', 1, '?')
+ expr: 'h'
type: string
- expr: rpad('hi', 5, '.')
+ expr: 'hi...'
type: string
- expr: rpad('hi', 6, '123')
+ expr: 'hi1231'
type: string
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/udf_parse_url.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_parse_url.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_parse_url.q.out (working copy)
@@ -65,28 +65,28 @@
type: boolean
Select Operator
expressions:
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST')
+ expr: 'facebook.com'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH')
+ expr: '/path1/p.php'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY')
+ expr: 'k1=v1&k2=v2'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF')
+ expr: 'Ref1'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2')
+ expr: 'v2'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1')
+ expr: 'v1'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3')
+ expr: null
+ type: void
+ expr: '/path1/p.php?k1=v1&k2=v2'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE')
+ expr: 'http'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL')
+ expr: null
+ type: void
+ expr: 'facebook.com'
type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO')
- type: string
- expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY')
- type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
File Output Operator
compressed: false
Index: ql/src/test/results/clientpositive/select_unquote_not.q.out
===================================================================
--- ql/src/test/results/clientpositive/select_unquote_not.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/select_unquote_not.q.out (working copy)
@@ -70,7 +70,7 @@
alias: npe_test
Filter Operator
predicate:
- expr: (not (ds < ((2012 - 11) - 31)))
+ expr: (not (ds < 1970))
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/infer_const_type.q.out
===================================================================
--- ql/src/test/results/clientpositive/infer_const_type.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/infer_const_type.q.out (working copy)
@@ -62,19 +62,19 @@
type: boolean
Select Operator
expressions:
- expr: ti
+ expr: 127
type: tinyint
- expr: si
+ expr: 32767
type: smallint
- expr: i
+ expr: 12345
type: int
- expr: bi
+ expr: -12345
type: bigint
- expr: fl
+ expr: 906.0
type: float
- expr: db
+ expr: -307.0
type: double
- expr: str
+ expr: '1234'
type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
File Output Operator
@@ -150,7 +150,7 @@
alias: infertypes
Filter Operator
predicate:
- expr: (((((false or false) or false) or false) or false) or false)
+ expr: false
type: boolean
Select Operator
expressions:
@@ -229,7 +229,7 @@
alias: infertypes
Filter Operator
predicate:
- expr: ((false or false) or false)
+ expr: false
type: boolean
Select Operator
expressions:
@@ -316,7 +316,7 @@
type: float
expr: db
type: double
- expr: str
+ expr: '1.57'
type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
File Output Operator
Index: ql/src/test/results/clientpositive/udf_between.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_between.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_between.q.out (working copy)
@@ -27,7 +27,7 @@
alias: src
Filter Operator
predicate:
- expr: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50)
+ expr: (key + 100) BETWEEN 100 AND 200
type: boolean
Select Operator
expressions:
@@ -86,7 +86,7 @@
alias: src
Filter Operator
predicate:
- expr: (key + 100) NOT BETWEEN (150 + (- 50)) AND (150 + 50)
+ expr: (key + 100) NOT BETWEEN 100 AND 200
type: boolean
Select Operator
expressions:
@@ -143,19 +143,15 @@
Processor Tree:
TableScan
alias: src
- Filter Operator
- predicate:
- expr: 'b' BETWEEN 'a' AND 'c'
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Limit
- ListSink
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Limit
+ ListSink
PREHOOK: query: SELECT * FROM src where 'b' between 'a' AND 'c' LIMIT 1
PREHOOK: type: QUERY
@@ -183,19 +179,15 @@
Processor Tree:
TableScan
alias: src
- Filter Operator
- predicate:
- expr: 2 BETWEEN 2 AND '3'
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Limit
- ListSink
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Limit
+ ListSink
PREHOOK: query: SELECT * FROM src where 2 between 2 AND '3' LIMIT 1
PREHOOK: type: QUERY
Index: ql/src/test/results/clientpositive/subquery_in_having.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_in_having.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_in_having.q.out (working copy)
@@ -160,24 +160,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: bigint
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-3
Map Reduce
@@ -425,26 +421,22 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: bigint
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: bigint
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-3
Map Reduce
@@ -655,24 +647,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: double
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: double
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-3
Map Reduce
@@ -872,35 +860,31 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
expr: _col0
type: string
expr: _col1
type: string
- outputColumnNames: _col0, _col1
- Group By Operator
- aggregations:
- expr: count()
- bucketGroup: false
- keys:
- expr: _col0
- type: string
- expr: _col1
- type: string
- mode: hash
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
@@ -984,26 +968,22 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: bigint
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: bigint
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-5
Map Reduce
@@ -1222,26 +1202,22 @@
1 [Column[_col0]]
outputColumnNames: _col0, _col1, _col2
Position of Big Table: 0
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: bigint
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: bigint
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Local Work:
Map Reduce Local Work
@@ -1286,26 +1262,22 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: bigint
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: bigint
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-10
Map Reduce Local Work
@@ -1361,44 +1333,40 @@
1 [Column[_col0]]
outputColumnNames: _col0, _col1
Position of Big Table: 0
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
expr: _col0
type: string
expr: _col1
type: string
- outputColumnNames: _col0, _col1
- Group By Operator
- aggregations:
- expr: count()
- bucketGroup: false
- keys:
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
expr: _col0
type: string
expr: _col1
type: string
- mode: hash
- outputColumnNames: _col0, _col1, _col2
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- value expressions:
- expr: _col2
- type: bigint
+ sort order: ++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col2
+ type: bigint
Local Work:
Map Reduce Local Work
Reduce Operator Tree:
@@ -1540,26 +1508,22 @@
1 [Column[_col0]]
outputColumnNames: _col0, _col1, _col2
Position of Big Table: 0
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: double
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: double
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Local Work:
Map Reduce Local Work
@@ -1604,26 +1568,22 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: double
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: double
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-3
Map Reduce
Index: ql/src/test/results/clientpositive/subquery_in.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_in.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_in.q.out (working copy)
@@ -161,24 +161,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -304,24 +300,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -514,24 +506,20 @@
1
handleSkewJoin: false
outputColumnNames: _col1, _col5
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col1
- type: string
- expr: _col5
- type: int
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col1
+ type: string
+ expr: _col5
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -741,26 +729,22 @@
1
handleSkewJoin: false
outputColumnNames: _col1, _col2, _col5
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col2
- type: string
- expr: _col1
- type: string
- expr: _col5
- type: int
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col2
+ type: string
+ expr: _col1
+ type: string
+ expr: _col5
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -940,24 +924,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Index: ql/src/test/results/clientpositive/udf_second.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_second.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_second.q.out (working copy)
@@ -41,12 +41,12 @@
type: boolean
Select Operator
expressions:
- expr: second('2009-08-07 13:14:15')
+ expr: 15
type: int
- expr: second('13:14:15')
+ expr: 15
type: int
- expr: second('2009-08-07')
- type: int
+ expr: null
+ type: void
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/udf_hash.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_hash.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_hash.q.out (working copy)
@@ -40,27 +40,27 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: hash(UDFToByte(1))
+ expr: 1
type: int
- expr: hash(UDFToShort(2))
+ expr: 2
type: int
- expr: hash(3)
+ expr: 3
type: int
- expr: hash(UDFToLong('123456789012'))
+ expr: -1097262584
type: int
- expr: hash(UDFToFloat(1.25))
+ expr: 1067450368
type: int
- expr: hash(16.0)
+ expr: 1076887552
type: int
- expr: hash('400')
+ expr: 51508
type: int
- expr: hash('abc')
+ expr: 96354
type: int
- expr: hash(true)
+ expr: 1
type: int
- expr: hash(false)
+ expr: 0
type: int
- expr: hash(1,2,3)
+ expr: 1026
type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
ListSink
Index: ql/src/test/results/clientpositive/input6.q.out
===================================================================
--- ql/src/test/results/clientpositive/input6.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input6.q.out (working copy)
@@ -37,8 +37,8 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
+ expr: null
+ type: void
expr: value
type: string
outputColumnNames: _col0, _col1
Index: ql/src/test/results/clientpositive/udf7.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf7.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf7.q.out (working copy)
@@ -49,56 +49,56 @@
alias: dest1
Select Operator
expressions:
- expr: round(ln(3.0), 12)
+ expr: 1.098612288668
type: double
- expr: ln(0.0)
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: 1.098612288668
type: double
- expr: ln((- 1))
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: 1.584962500721
type: double
- expr: round(log(3.0), 12)
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: 0.47712125472
type: double
- expr: log(0.0)
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: 1.584962500721
type: double
- expr: log((- 1))
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: null
+ type: void
+ expr: -1.0
type: double
- expr: round(log2(3.0), 12)
+ expr: 7.389056098931
type: double
- expr: log2(0.0)
+ expr: 8.0
type: double
- expr: log2((- 1))
+ expr: 8.0
type: double
- expr: round(log10(3.0), 12)
+ expr: 0.125
type: double
- expr: log10(0.0)
+ expr: 8.0
type: double
- expr: log10((- 1))
+ expr: 2.0
type: double
- expr: round(log(2, 3.0), 12)
+ expr: NaN
type: double
- expr: log(2, 0.0)
+ expr: 1.0
type: double
- expr: log(2, (- 1))
- type: double
- expr: log(0.5, 2)
- type: double
- expr: log(2, 0.5)
- type: double
- expr: round(exp(2.0), 12)
- type: double
- expr: power(2, 3)
- type: double
- expr: power(2, 3)
- type: double
- expr: power(2, (- 3))
- type: double
- expr: power(0.5, (- 3))
- type: double
- expr: power(4, 0.5)
- type: double
- expr: power((- 1), 0.5)
- type: double
- expr: power((- 1), 2)
- type: double
expr: power(CAST( 1 AS decimal(10,0)), 0)
type: double
expr: power(CAST( 2 AS decimal(10,0)), 3)
Index: ql/src/test/results/clientpositive/join38.q.out
===================================================================
--- ql/src/test/results/clientpositive/join38.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/join38.q.out (working copy)
@@ -98,8 +98,8 @@
1 {col5}
handleSkewJoin: false
keys:
- 0 [Column[key]]
- 1 [Column[col11]]
+ 0 [Const string 111]
+ 1 [Const string 111]
Position of Big Table: 1
Stage: Stage-1
@@ -120,8 +120,8 @@
1 {col5}
handleSkewJoin: false
keys:
- 0 [Column[key]]
- 1 [Column[col11]]
+ 0 [Const string 111]
+ 1 [Const string 111]
outputColumnNames: _col1, _col9
Position of Big Table: 1
Select Operator
Index: ql/src/test/results/clientpositive/mapjoin1.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapjoin1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/mapjoin1.q.out (working copy)
@@ -372,19 +372,15 @@
a
TableScan
alias: a
- Filter Operator
- predicate:
- expr: true
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- Position of Big Table: 1
+ HashTable Sink Operator
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ Position of Big Table: 1
Stage: Stage-1
Map Reduce
@@ -392,41 +388,37 @@
b
TableScan
alias: b
- Filter Operator
- predicate:
- expr: true
- type: boolean
- Map Join Operator
- condition map:
- Right Outer Join0 to 1
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- outputColumnNames: _col0, _col1, _col4, _col5
- Position of Big Table: 1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col4
- type: string
- expr: _col5
- type: string
- outputColumnNames: _col0, _col1, _col2, _col3
- Limit
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Map Join Operator
+ condition map:
+ Right Outer Join0 to 1
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Position of Big Table: 1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Local Work:
Map Reduce Local Work
Index: ql/src/test/results/clientpositive/udf_when.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_when.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_when.q.out (working copy)
@@ -80,18 +80,18 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END
+ expr: 2
type: int
- expr: CASE WHEN ((6 = 7)) THEN (8) ELSE (9) END
+ expr: 9
type: int
- expr: CASE WHEN ((10 = 11)) THEN (12) WHEN ((13 = 13)) THEN (14) END
+ expr: 14
type: int
- expr: CASE WHEN ((15 = 16)) THEN (17) WHEN ((18 = 19)) THEN (20) END
+ expr: null
+ type: void
+ expr: CASE WHEN (false) THEN (null) WHEN (true) THEN (24) END
type: int
- expr: CASE WHEN ((21 = 22)) THEN (null) WHEN ((23 = 23)) THEN (24) END
+ expr: CASE WHEN (false) THEN (27) WHEN (true) THEN (null) END
type: int
- expr: CASE WHEN ((25 = 26)) THEN (27) WHEN ((28 = 28)) THEN (null) END
- type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
ListSink
Index: ql/src/test/results/clientpositive/transform_ppr1.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy)
@@ -77,7 +77,7 @@
numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE
tag: -1
value expressions:
- expr: _col0
+ expr: '2008-04-08'
type: string
expr: _col1
type: string
Index: ql/src/test/results/clientpositive/regexp_extract.q.out
===================================================================
--- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy)
@@ -39,9 +39,9 @@
type: string
expr: value
type: string
- expr: (1 + 2)
+ expr: 3
type: int
- expr: (3 + 4)
+ expr: 7
type: int
outputColumnNames: _col0, _col1, _col2, _col3
Statistics:
@@ -317,9 +317,9 @@
type: string
expr: value
type: string
- expr: (1 + 2)
+ expr: 3
type: int
- expr: (3 + 4)
+ expr: 7
type: int
outputColumnNames: _col0, _col1, _col2, _col3
Statistics:
Index: ql/src/test/results/clientpositive/subquery_notexists.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_notexists.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_notexists.q.out (working copy)
@@ -91,7 +91,7 @@
outputColumnNames: _col0, _col1, _col6
Filter Operator
predicate:
- expr: ((1 = 1) and _col6 is null)
+ expr: _col6 is null
type: boolean
Select Operator
expressions:
@@ -387,7 +387,7 @@
outputColumnNames: _col0, _col1, _col5
Filter Operator
predicate:
- expr: ((1 = 1) and _col5 is null)
+ expr: _col5 is null
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/index_auto_update.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_update.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_auto_update.q.out (working copy)
@@ -343,7 +343,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: val
type: string
Index: ql/src/test/results/clientpositive/ppd_outer_join5.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join5.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_outer_join5.q.out (working copy)
@@ -88,16 +88,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: id
+ expr: 20
type: int
sort order: +
Map-reduce partition columns:
- expr: id
+ expr: 20
type: int
tag: 2
value expressions:
- expr: id
- type: int
expr: key
type: string
expr: value
@@ -110,9 +108,9 @@
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
- 2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 2 {VALUE._col1} {VALUE._col2}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col11, _col12
Select Operator
expressions:
expr: _col0
@@ -127,7 +125,7 @@
type: string
expr: _col7
type: string
- expr: _col10
+ expr: 20
type: int
expr: _col11
type: string
@@ -170,16 +168,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: id
+ expr: 20
type: int
sort order: +
Map-reduce partition columns:
- expr: id
+ expr: 20
type: int
tag: 0
value expressions:
- expr: id
- type: int
expr: key
type: string
expr: value
@@ -193,16 +189,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: id
+ expr: 20
type: int
sort order: +
Map-reduce partition columns:
- expr: id
+ expr: 20
type: int
tag: 1
value expressions:
- expr: id
- type: int
expr: key
type: string
expr: value
@@ -236,20 +230,20 @@
Inner Join 0 to 1
Left Outer Join1 to 2
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 0 {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col1} {VALUE._col2}
2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12
Select Operator
expressions:
- expr: _col0
+ expr: 20
type: int
expr: _col1
type: string
expr: _col2
type: string
- expr: _col5
+ expr: 20
type: int
expr: _col6
type: string
@@ -298,16 +292,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: id
+ expr: 20
type: int
sort order: +
Map-reduce partition columns:
- expr: id
+ expr: 20
type: int
tag: 0
value expressions:
- expr: id
- type: int
expr: key
type: string
expr: value
@@ -321,16 +313,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: id
+ expr: 20
type: int
sort order: +
Map-reduce partition columns:
- expr: id
+ expr: 20
type: int
tag: 1
value expressions:
- expr: id
- type: int
expr: key
type: string
expr: value
@@ -364,20 +354,20 @@
Inner Join 0 to 1
Left Outer Join0 to 2
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 0 {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col1} {VALUE._col2}
2 {VALUE._col0} {VALUE._col1} {VALUE._col2}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12
Select Operator
expressions:
- expr: _col0
+ expr: 20
type: int
expr: _col1
type: string
expr: _col2
type: string
- expr: _col5
+ expr: 20
type: int
expr: _col6
type: string
Index: ql/src/test/results/clientpositive/quotedid_partition.q.out
===================================================================
--- ql/src/test/results/clientpositive/quotedid_partition.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/quotedid_partition.q.out (working copy)
@@ -50,17 +50,15 @@
type: boolean
Select Operator
expressions:
- expr: x+1
- type: string
expr: y&y
type: string
expr: !@#$%^&*()_q
type: string
- outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ outputColumnNames: y&y, !@#$%^&*()_q
Group By Operator
bucketGroup: false
keys:
- expr: x+1
+ expr: '10'
type: string
expr: y&y
type: string
Index: ql/src/test/results/clientpositive/ppd_union_view.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy)
@@ -438,7 +438,7 @@
type: string
expr: value
type: string
- expr: ds
+ expr: '2011-10-13'
type: string
outputColumnNames: _col0, _col1, _col2
Statistics:
@@ -618,13 +618,13 @@
key expressions:
expr: keymap
type: string
- expr: ds
+ expr: '2011-10-15'
type: string
sort order: ++
Map-reduce partition columns:
expr: keymap
type: string
- expr: ds
+ expr: '2011-10-15'
type: string
Statistics:
numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
@@ -649,13 +649,13 @@
key expressions:
expr: keymap
type: string
- expr: ds
+ expr: '2011-10-15'
type: string
sort order: ++
Map-reduce partition columns:
expr: keymap
type: string
- expr: ds
+ expr: '2011-10-15'
type: string
Statistics:
numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
@@ -663,18 +663,16 @@
value expressions:
expr: value
type: string
- expr: ds
- type: string
Needs Tagging: true
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col1} {VALUE._col2}
+ 0 {VALUE._col1}
1 {VALUE._col0}
handleSkewJoin: false
- outputColumnNames: _col1, _col2, _col5
+ outputColumnNames: _col1, _col5
Statistics:
numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
Select Operator
@@ -683,7 +681,7 @@
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '2011-10-15'
type: string
outputColumnNames: _col0, _col1, _col2
Statistics:
Index: ql/src/test/results/clientpositive/column_access_stats.q.out
===================================================================
--- ql/src/test/results/clientpositive/column_access_stats.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/column_access_stats.q.out (working copy)
@@ -633,8 +633,6 @@
value expressions:
expr: key
type: string
- expr: val
- type: string
t2
TableScan
alias: t2
@@ -654,28 +652,22 @@
value expressions:
expr: key
type: string
- expr: val
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col0}
+ 1 {VALUE._col0}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4, _col5
+ outputColumnNames: _col0, _col4
Select Operator
expressions:
expr: _col0
type: string
- expr: _col1
- type: string
expr: _col4
type: string
- expr: _col5
- type: string
- outputColumnNames: _col0, _col1, _col2, _col3
+ outputColumnNames: _col0, _col2
File Output Operator
compressed: false
GlobalTableId: 0
@@ -693,18 +685,18 @@
key expressions:
expr: _col0
type: string
- expr: _col1
+ expr: '3'
type: string
sort order: ++
tag: -1
value expressions:
expr: _col0
type: string
- expr: _col1
+ expr: '3'
type: string
expr: _col2
type: string
- expr: _col3
+ expr: '3'
type: string
Reduce Operator Tree:
Extract
Index: ql/src/test/results/clientpositive/udf_elt.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_elt.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_elt.q.out (working copy)
@@ -55,28 +55,28 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: elt(2, 'abc', 'defg')
+ expr: 'defg'
type: string
- expr: elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg')
+ expr: 'cc'
type: string
- expr: elt('1', 'abc', 'defg')
+ expr: 'abc'
type: string
- expr: elt(2, 'aa', UDFToByte('2'))
+ expr: '2'
type: string
- expr: elt(2, 'aa', UDFToShort('12345'))
+ expr: '12345'
type: string
- expr: elt(2, 'aa', UDFToLong('123456789012'))
+ expr: '123456789012'
type: string
- expr: elt(2, 'aa', UDFToFloat(1.25))
+ expr: '1.25'
type: string
- expr: elt(2, 'aa', 16.0)
+ expr: '16.0'
type: string
expr: elt(null, 'abc', 'defg')
type: string
- expr: elt(0, 'abc', 'defg')
- type: string
- expr: elt(3, 'abc', 'defg')
- type: string
+ expr: null
+ type: void
+ expr: null
+ type: void
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
ListSink
Index: ql/src/test/results/clientpositive/select_unquote_or.q.out
===================================================================
--- ql/src/test/results/clientpositive/select_unquote_or.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/select_unquote_or.q.out (working copy)
@@ -70,7 +70,7 @@
alias: npe_test
Filter Operator
predicate:
- expr: ((ds > ((2012 - 11) - 31)) or (ds < ((2012 - 12) - 15)))
+ expr: ((ds > 1970) or (ds < 1985))
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/udf_printf.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_printf.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_printf.q.out (working copy)
@@ -41,7 +41,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: printf('Hello World %d %s', 100, 'days')
+ expr: 'Hello World 100 days'
type: string
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/udf_degrees.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_degrees.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_degrees.q.out (working copy)
@@ -20,7 +20,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: degrees(pi())
+ expr: 180.0
type: double
outputColumnNames: _col0
ListSink
@@ -70,7 +70,7 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: degrees(pi())
+ expr: 180.0
type: double
outputColumnNames: _col0
ListSink
Index: ql/src/test/results/clientpositive/ppd_udf_col.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_udf_col.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/ppd_udf_col.q.out (working copy)
@@ -28,7 +28,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
@@ -90,7 +90,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
@@ -160,11 +160,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
- expr: hex(4)
+ expr: '4'
type: string
outputColumnNames: _col0, _col2, _col3
Filter Operator
@@ -222,7 +222,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
@@ -284,7 +284,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
@@ -346,7 +346,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
@@ -405,11 +405,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
- expr: hex(4)
+ expr: '4'
type: string
outputColumnNames: _col0, _col2, _col3
Filter Operator
@@ -467,7 +467,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '100'
type: string
expr: rand()
type: double
Index: ql/src/test/results/clientpositive/udf_lpad.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_lpad.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_lpad.q.out (working copy)
@@ -41,11 +41,11 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: lpad('hi', 1, '?')
+ expr: 'h'
type: string
- expr: lpad('hi', 5, '.')
+ expr: '...hi'
type: string
- expr: lpad('hi', 6, '123')
+ expr: '1231hi'
type: string
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/constprog2.q.out
===================================================================
--- ql/src/test/results/clientpositive/constprog2.q.out (revision 0)
+++ ql/src/test/results/clientpositive/constprog2.q.out (revision 0)
@@ -0,0 +1,95 @@
+PREHOOK: query: EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) src1) (TOK_TABREF (TOK_TABNAME src) src2) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (= (. (TOK_TABLE_OR_COL src1) key) 86)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (+ (. (TOK_TABLE_OR_COL src1) key) 1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src1
+ TableScan
+ alias: src1
+ Filter Operator
+ predicate:
+ expr: (key = 86)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: '86'
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: '86'
+ type: string
+ tag: 0
+ src2
+ TableScan
+ alias: src2
+ Filter Operator
+ predicate:
+ expr: (key = 86)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: '86'
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: '86'
+ type: string
+ tag: 1
+ value expressions:
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0
+ 1 {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col5
+ Select Operator
+ expressions:
+ expr: '86'
+ type: string
+ expr: 87.0
+ type: double
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+86 87.0 val_86
Index: ql/src/test/results/clientpositive/constprog_dp.q.out
===================================================================
--- ql/src/test/results/clientpositive/constprog_dp.q.out (revision 0)
+++ ql/src/test/results/clientpositive/constprog_dp.q.out (revision 0)
@@ -0,0 +1,125 @@
+PREHOOK: query: create table dest(key string, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table dest(key string, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest
+PREHOOK: query: EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest) (TOK_PARTSPEC (TOK_PARTVAL ds)))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '2008-04-08'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+ Stage-4
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+ Stage-2 depends on stages: Stage-0
+ Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ srcpart
+ TableScan
+ alias: srcpart
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-7
+ Conditional Operator
+
+ Stage: Stage-4
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ partition:
+ ds
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+ Stage: Stage-3
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest
+POSTHOOK: query: from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest@ds=2008-04-08
+POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input23.q.out
===================================================================
--- ql/src/test/results/clientpositive/input23.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input23.q.out (working copy)
@@ -58,10 +58,6 @@
type: string
expr: value
type: string
- expr: ds
- type: string
- expr: hr
- type: string
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -116,9 +112,9 @@
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
+ 1 {VALUE._col0} {VALUE._col1}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9
+ outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7
Statistics:
numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE
Select Operator
@@ -135,9 +131,9 @@
type: string
expr: _col7
type: string
- expr: _col8
+ expr: '2008-04-08'
type: string
- expr: _col9
+ expr: '14'
type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics:
Index: ql/src/test/results/clientpositive/regex_col.q.out
===================================================================
--- ql/src/test/results/clientpositive/regex_col.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/regex_col.q.out (working copy)
@@ -232,7 +232,7 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '103'
type: string
expr: hr
type: string
@@ -240,7 +240,7 @@
type: string
sort order: +++
Map-reduce partition columns:
- expr: key
+ expr: '103'
type: string
expr: hr
type: string
@@ -256,7 +256,7 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '103'
type: string
expr: hr
type: string
@@ -264,7 +264,7 @@
type: string
sort order: +++
Map-reduce partition columns:
- expr: key
+ expr: '103'
type: string
expr: hr
type: string
Index: ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out (working copy)
@@ -738,8 +738,6 @@
sort order:
tag: 1
value expressions:
- expr: p_partkey
- type: int
expr: p_name
type: string
expr: p_mfgr
@@ -762,9 +760,9 @@
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
File Output Operator
compressed: false
GlobalTableId: 0
@@ -788,8 +786,6 @@
type: string
tag: 0
value expressions:
- expr: _col11
- type: int
expr: _col12
type: string
expr: _col13
@@ -860,13 +856,13 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19}
+ 0 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19}
1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Filter Operator
predicate:
- expr: ((_col0 = 1) and (_col23 = _col1))
+ expr: (_col23 = _col1)
type: boolean
Select Operator
expressions:
@@ -888,7 +884,7 @@
type: double
expr: _col19
type: string
- expr: _col0
+ expr: 1
type: int
expr: _col1
type: string
Index: ql/src/test/results/clientpositive/udf_ascii.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_ascii.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_ascii.q.out (working copy)
@@ -41,11 +41,11 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: ascii('Facebook')
+ expr: 70
type: int
- expr: ascii('')
+ expr: 0
type: int
- expr: ascii('!')
+ expr: 33
type: int
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/subquery_exists_having.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_exists_having.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_exists_having.q.out (working copy)
@@ -135,24 +135,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: bigint
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Index: ql/src/test/results/clientpositive/input_part6.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part6.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/input_part6.q.out (working copy)
@@ -20,7 +20,7 @@
alias: x
Filter Operator
predicate:
- expr: (ds = ((2008 - 4) - 8))
+ expr: (ds = 1996)
type: boolean
Select Operator
expressions:
@@ -28,7 +28,7 @@
type: string
expr: value
type: string
- expr: ds
+ expr: '1996'
type: string
expr: hr
type: string
Index: ql/src/test/results/clientpositive/subquery_exists.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_exists.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_exists.q.out (working copy)
@@ -95,24 +95,20 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Index: ql/src/test/results/clientpositive/udf_if.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_if.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_if.q.out (working copy)
@@ -42,15 +42,15 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: if(true, 1, 2)
+ expr: 1
type: int
- expr: if(false, UDFToString(null), UDFToString(1))
+ expr: if(false, UDFToString(null), '1')
type: string
- expr: if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4))
+ expr: 1
type: int
- expr: if((2 = 2), 1, null)
+ expr: if(true, 1, null)
type: int
- expr: if((2 = 2), null, 1)
+ expr: if(true, null, 1)
type: int
expr: if(if(true, null, false), 1, 2)
type: int
@@ -110,13 +110,13 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: if(true, UDFToShort(128), UDFToByte(1))
+ expr: 128
type: smallint
- expr: if(false, 1, 1.1)
+ expr: 1.1
type: double
- expr: if(false, 1, 'ABC')
+ expr: 'ABC'
type: string
- expr: if(false, 'ABC', 12.3)
+ expr: '12.3'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
ListSink
Index: ql/src/test/results/clientpositive/udf_minute.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_minute.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_minute.q.out (working copy)
@@ -42,12 +42,12 @@
type: boolean
Select Operator
expressions:
- expr: minute('2009-08-07 13:14:15')
+ expr: 14
type: int
- expr: minute('13:14:15')
+ expr: 14
type: int
- expr: minute('2009-08-07')
- type: int
+ expr: null
+ type: void
outputColumnNames: _col0, _col1, _col2
File Output Operator
compressed: false
Index: ql/src/test/results/clientpositive/constprog_type.q.out
===================================================================
--- ql/src/test/results/clientpositive/constprog_type.q.out (revision 0)
+++ ql/src/test/results/clientpositive/constprog_type.q.out (revision 0)
@@ -0,0 +1,133 @@
+PREHOOK: query: CREATE TABLE dest1(d date, t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(d date, t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_DATE '2013-11-17')) (TOK_SELEXPR (TOK_FUNCTION TOK_TIMESTAMP (TOK_FUNCTION TOK_DOUBLE '1.3041352164485E9'))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+ Stage-4
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+ Stage-2 depends on stages: Stage-0
+ Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Row Limit Per Split: 1
+ Select Operator
+ expressions:
+ expr: 2013-11-17
+ type: date
+ expr: 2011-04-29 20:46:56.4485
+ type: timestamp
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-7
+ Conditional Operator
+
+ Stage: Stage-4
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+ Stage: Stage-3
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.d EXPRESSION []
+POSTHOOK: Lineage: dest1.t EXPRESSION []
+PREHOOK: query: SELECT * FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.d EXPRESSION []
+POSTHOOK: Lineage: dest1.t EXPRESSION []
+2013-11-17 2011-04-29 20:46:56.4485
Index: ql/src/test/results/clientpositive/cluster.q.out
===================================================================
--- ql/src/test/results/clientpositive/cluster.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/cluster.q.out (working copy)
@@ -24,22 +24,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '10'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '10'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '10'
type: string
expr: _col1
type: string
@@ -92,22 +90,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -160,22 +156,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -228,22 +222,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -296,22 +288,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -364,22 +354,20 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -432,11 +420,9 @@
type: boolean
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: _col0, _col1
+ outputColumnNames: _col1
Reduce Output Operator
key expressions:
expr: _col1
@@ -447,7 +433,7 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -523,7 +509,7 @@
type: boolean
Select Operator
expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
@@ -576,16 +562,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -597,34 +581,27 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
- value expressions:
- expr: key
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0}
+ 0 {VALUE._col1}
+ 1
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
- type: string
- outputColumnNames: _col0, _col1, _col2
+ outputColumnNames: _col1
File Output Operator
compressed: false
GlobalTableId: 0
@@ -648,11 +625,11 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
Reduce Operator Tree:
Extract
@@ -704,16 +681,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -725,16 +700,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
value expressions:
- expr: key
- type: string
expr: value
type: string
Reduce Operator Tree:
@@ -742,21 +715,17 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col1}
+ 1 {VALUE._col1}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4, _col5
+ outputColumnNames: _col1, _col5
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
- type: string
expr: _col5
type: string
- outputColumnNames: _col0, _col1, _col2, _col3
+ outputColumnNames: _col1, _col3
File Output Operator
compressed: false
GlobalTableId: 0
@@ -780,11 +749,11 @@
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
expr: _col3
type: string
@@ -838,16 +807,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -859,16 +826,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
value expressions:
- expr: key
- type: string
expr: value
type: string
Reduce Operator Tree:
@@ -876,21 +841,17 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col1}
+ 1 {VALUE._col1}
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4, _col5
+ outputColumnNames: _col1, _col5
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
- type: string
expr: _col5
type: string
- outputColumnNames: _col0, _col1, _col2, _col3
+ outputColumnNames: _col1, _col3
File Output Operator
compressed: false
GlobalTableId: 0
@@ -906,19 +867,19 @@
TableScan
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
- expr: _col2
+ expr: '20'
type: string
expr: _col3
type: string
@@ -972,16 +933,14 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 0
value expressions:
- expr: key
- type: string
expr: value
type: string
y
@@ -993,34 +952,29 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: '20'
type: string
tag: 1
- value expressions:
- expr: key
- type: string
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0}
+ 0 {VALUE._col1}
+ 1
handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1
Select Operator
expressions:
- expr: _col0
- type: string
expr: _col1
type: string
- expr: _col4
+ expr: '20'
type: string
- outputColumnNames: _col0, _col1, _col2
+ outputColumnNames: _col1, _col2
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1036,15 +990,15 @@
TableScan
Reduce Output Operator
key expressions:
- expr: _col0
+ expr: '20'
type: string
sort order: +
Map-reduce partition columns:
- expr: _col0
+ expr: '20'
type: string
tag: -1
value expressions:
- expr: _col0
+ expr: '20'
type: string
expr: _col1
type: string
Index: ql/src/test/results/clientpositive/udf_lower.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_lower.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_lower.q.out (working copy)
@@ -38,9 +38,9 @@
type: boolean
Select Operator
expressions:
- expr: lower('AbC 123')
+ expr: 'abc 123'
type: string
- expr: upper('AbC 123')
+ expr: 'ABC 123'
type: string
outputColumnNames: _col0, _col1
File Output Operator
Index: ql/src/test/results/clientpositive/udf_format_number.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_format_number.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_format_number.q.out (working copy)
@@ -45,11 +45,11 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: format_number(12332.123456, 4)
+ expr: '12,332.1235'
type: string
- expr: format_number(12332.1, 4)
+ expr: '12,332.1000'
type: string
- expr: format_number(12332.2, 0)
+ expr: '12,332'
type: string
outputColumnNames: _col0, _col1, _col2
ListSink
Index: ql/src/test/results/clientpositive/udf9.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf9.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf9.q.out (working copy)
@@ -40,37 +40,37 @@
type: boolean
Select Operator
expressions:
- expr: datediff('2008-12-31', '2009-01-01')
+ expr: -1
type: int
- expr: datediff('2008-03-01', '2008-02-28')
+ expr: 2
type: int
- expr: datediff('2007-03-01', '2007-01-28')
+ expr: 32
type: int
- expr: datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00')
+ expr: -1
type: int
- expr: date_add('2008-12-31', 1)
+ expr: '2009-01-01'
type: string
- expr: date_add('2008-12-31', 365)
+ expr: '2009-12-31'
type: string
- expr: date_add('2008-02-28', 2)
+ expr: '2008-03-01'
type: string
- expr: date_add('2009-02-28', 2)
+ expr: '2009-03-02'
type: string
- expr: date_add('2007-02-28', 365)
+ expr: '2008-02-28'
type: string
- expr: date_add('2007-02-28 23:59:59', 730)
+ expr: '2009-02-27'
type: string
- expr: date_sub('2009-01-01', 1)
+ expr: '2008-12-31'
type: string
- expr: date_sub('2009-01-01', 365)
+ expr: '2008-01-02'
type: string
- expr: date_sub('2008-02-28', 2)
+ expr: '2008-02-26'
type: string
- expr: date_sub('2009-02-28', 2)
+ expr: '2009-02-26'
type: string
- expr: date_sub('2007-02-28', 365)
+ expr: '2006-02-28'
type: string
- expr: date_sub('2007-02-28 01:12:34', 730)
+ expr: '2005-02-28'
type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
File Output Operator
Index: ql/src/test/results/clientpositive/vectorization_short_regress.q.out
===================================================================
--- ql/src/test/results/clientpositive/vectorization_short_regress.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/vectorization_short_regress.q.out (working copy)
@@ -147,7 +147,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: ((((762 = cbigint) or ((csmallint < cfloat) and ((ctimestamp2 > (- 10669)) and (cdouble <> cint)))) or (cstring1 = 'a')) or ((cbigint <= (- 1.389)) and ((cstring2 <> 'a') and ((79.553 <> cint) and (cboolean2 <> cboolean1)))))
+ expr: ((((762 = cbigint) or ((csmallint < cfloat) and ((ctimestamp2 > -10669) and (cdouble <> cint)))) or (cstring1 = 'a')) or ((cbigint <= -1.389) and ((cstring2 <> 'a') and ((79.553 <> cint) and (cboolean2 <> cboolean1)))))
type: boolean
Vectorized execution: true
Select Operator
@@ -220,13 +220,13 @@
expressions:
expr: _col0
type: double
- expr: (_col0 + (- 3728))
+ expr: (_col0 + -3728)
type: double
- expr: (- (_col0 + (- 3728)))
+ expr: (- (_col0 + -3728))
type: double
- expr: (- (- (_col0 + (- 3728))))
+ expr: (- (- (_col0 + -3728)))
type: double
- expr: ((- (- (_col0 + (- 3728)))) * (_col0 + (- 3728)))
+ expr: ((- (- (_col0 + -3728))) * (_col0 + -3728))
type: double
expr: _col1
type: double
@@ -234,15 +234,15 @@
type: double
expr: _col2
type: double
- expr: (((- (- (_col0 + (- 3728)))) * (_col0 + (- 3728))) * (- (- (_col0 + (- 3728)))))
+ expr: (((- (- (_col0 + -3728))) * (_col0 + -3728)) * (- (- (_col0 + -3728))))
type: double
expr: _col3
type: double
expr: (- _col2)
type: double
- expr: (_col2 - (- (- (_col0 + (- 3728)))))
+ expr: (_col2 - (- (- (_col0 + -3728))))
type: double
- expr: ((_col2 - (- (- (_col0 + (- 3728))))) * _col2)
+ expr: ((_col2 - (- (- (_col0 + -3728)))) * _col2)
type: double
expr: _col4
type: double
@@ -252,11 +252,11 @@
type: double
expr: (- (10.175 - _col4))
type: double
- expr: ((- _col2) / (- 563))
+ expr: ((- _col2) / -563)
type: double
expr: _col6
type: double
- expr: (- ((- _col2) / (- 563)))
+ expr: (- ((- _col2) / -563))
type: double
expr: (_col0 / _col1)
type: double
@@ -264,7 +264,7 @@
type: tinyint
expr: _col8
type: bigint
- expr: (_col7 / ((- _col2) / (- 563)))
+ expr: (_col7 / ((- _col2) / -563))
type: double
expr: (- (_col0 / _col1))
type: double
@@ -451,7 +451,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (((((cbigint <= 197) and (cint < cbigint)) or ((cdouble >= (- 26.28)) and (csmallint > cdouble))) or ((ctinyint > cfloat) and (cstring1 rlike '.*ss.*'))) or ((cfloat > 79.553) and (cstring2 like '10%')))
+ expr: (((((cbigint <= 197) and (cint < cbigint)) or ((cdouble >= -26.28) and (csmallint > cdouble))) or ((ctinyint > cfloat) and (cstring1 rlike '.*ss.*'))) or ((cfloat > 79.553) and (cstring2 like '10%')))
type: boolean
Vectorized execution: true
Select Operator
@@ -524,17 +524,17 @@
expressions:
expr: _col0
type: int
- expr: (_col0 / (- 3728))
+ expr: (_col0 / -3728)
type: decimal(16,6)
- expr: (_col0 * (- 3728))
+ expr: (_col0 * -3728)
type: int
expr: _col1
type: double
- expr: (- (_col0 * (- 3728)))
+ expr: (- (_col0 * -3728))
type: int
expr: _col2
type: double
- expr: ((- 563) % (_col0 * (- 3728)))
+ expr: (-563 % (_col0 * -3728))
type: int
expr: (_col1 / _col2)
type: double
@@ -548,23 +548,23 @@
type: double
expr: _col5
type: int
- expr: ((_col0 * (- 3728)) % (_col2 - 10.175))
+ expr: ((_col0 * -3728) % (_col2 - 10.175))
type: double
expr: (- _col3)
type: double
expr: _col6
type: double
- expr: (_col3 % (- 26.28))
+ expr: (_col3 % -26.28)
type: double
expr: _col7
type: double
- expr: (- (_col0 / (- 3728)))
+ expr: (- (_col0 / -3728))
type: decimal(16,6)
- expr: ((- (_col0 * (- 3728))) % ((- 563) % (_col0 * (- 3728))))
+ expr: ((- (_col0 * -3728)) % (-563 % (_col0 * -3728)))
type: int
- expr: ((_col0 / (- 3728)) - _col4)
+ expr: ((_col0 / -3728) - _col4)
type: double
- expr: (- (_col0 * (- 3728)))
+ expr: (- (_col0 * -3728))
type: int
expr: _col8
type: double
@@ -825,7 +825,7 @@
type: double
expr: (- (- _col0))
type: double
- expr: ((- 1) % (- _col0))
+ expr: (-1 % (- _col0))
type: double
expr: _col1
type: bigint
@@ -851,7 +851,7 @@
type: bigint
expr: _col7
type: double
- expr: ((- 3728) % (_col2 + (762 * (- _col1))))
+ expr: (-3728 % (_col2 + (762 * (- _col1))))
type: bigint
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21
File Output Operator
@@ -1089,7 +1089,7 @@
type: double
expr: _col5
type: float
- expr: (_col4 * (- 26.28))
+ expr: (_col4 * -26.28)
type: double
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
File Output Operator
@@ -1248,7 +1248,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (((((cstring1 rlike 'a.*') and (cstring2 like '%ss%')) or ((1 <> cboolean2) and ((csmallint < 79.553) and ((- 257) <> ctinyint)))) or ((cdouble > ctinyint) and (cfloat >= cint))) or ((cint < cbigint) and (ctinyint > cbigint)))
+ expr: (((((cstring1 rlike 'a.*') and (cstring2 like '%ss%')) or ((1 <> cboolean2) and ((csmallint < 79.553) and (-257 <> ctinyint)))) or ((cdouble > ctinyint) and (cfloat >= cint))) or ((cint < cbigint) and (ctinyint > cbigint)))
type: boolean
Select Operator
expressions:
@@ -1272,11 +1272,11 @@
type: smallint
expr: cbigint
type: bigint
- expr: ((- 3728) * cbigint)
+ expr: (-3728 * cbigint)
type: bigint
expr: (- cint)
type: int
- expr: ((- 863.257) - cint)
+ expr: (-863.257 - cint)
type: double
expr: (- csmallint)
type: smallint
@@ -1286,11 +1286,11 @@
type: smallint
expr: (cint / cint)
type: decimal(21,11)
- expr: (((- 863.257) - cint) - (- 26.28))
+ expr: ((-863.257 - cint) - -26.28)
type: double
expr: (- cfloat)
type: float
- expr: (cdouble * (- 89010))
+ expr: (cdouble * -89010)
type: double
expr: (ctinyint / 988888)
type: decimal(10,7)
@@ -2742,7 +2742,7 @@
type: float
expr: (cfloat - (- cfloat))
type: double
- expr: ((cfloat - (- cfloat)) % (- 6432))
+ expr: ((cfloat - (- cfloat)) % -6432)
type: double
expr: (cdouble * csmallint)
type: double
@@ -2944,7 +2944,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (((((csmallint > (- 26.28)) and (cstring2 like 'ss')) or ((cdouble <= cbigint) and ((cstring1 >= 'ss') and (cint <> cdouble)))) or (ctinyint = (- 89010))) or ((cbigint <= cfloat) and ((- 26.28) <= csmallint)))
+ expr: (((((csmallint > -26.28) and (cstring2 like 'ss')) or ((cdouble <= cbigint) and ((cstring1 >= 'ss') and (cint <> cdouble)))) or (ctinyint = -89010)) or ((cbigint <= cfloat) and (-26.28 <= csmallint)))
type: boolean
Vectorized execution: true
Select Operator
@@ -2985,7 +2985,7 @@
type: bigint
expr: ((- cdouble) + cbigint)
type: double
- expr: ((- 1.389) / ctinyint)
+ expr: (-1.389 / ctinyint)
type: double
expr: (cbigint % cdouble)
type: double
@@ -3814,7 +3814,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (((((- 1.389) >= cint) and ((csmallint < ctinyint) and ((- 6432) > csmallint))) or ((cdouble >= cfloat) and (cstring2 <= 'a'))) or ((cstring1 like 'ss%') and (10.175 > cbigint)))
+ expr: ((((-1.389 >= cint) and ((csmallint < ctinyint) and (-6432 > csmallint))) or ((cdouble >= cfloat) and (cstring2 <= 'a'))) or ((cstring1 like 'ss%') and (10.175 > cbigint)))
type: boolean
Select Operator
expressions:
@@ -3832,15 +3832,15 @@
type: smallint
expr: (cbigint / 3569)
type: decimal(25,6)
- expr: ((- 257) - csmallint)
+ expr: (-257 - csmallint)
type: int
- expr: ((- 6432) * cfloat)
+ expr: (-6432 * cfloat)
type: double
expr: (- cdouble)
type: double
expr: (cdouble * 10.175)
type: double
- expr: (((- 6432) * cfloat) / cfloat)
+ expr: ((-6432 * cfloat) / cfloat)
type: double
expr: (- cfloat)
type: float
@@ -4699,7 +4699,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: ((csmallint >= (- 257)) and (((- 6432) = csmallint) or ((cint >= cdouble) and (ctinyint <= cint))))
+ expr: ((csmallint >= -257) and ((-6432 = csmallint) or ((cint >= cdouble) and (ctinyint <= cint))))
type: boolean
Vectorized execution: true
Select Operator
@@ -4760,25 +4760,25 @@
expressions:
expr: _col0
type: smallint
- expr: (_col0 % (- 75))
+ expr: (_col0 % -75)
type: int
expr: _col1
type: double
- expr: ((- 1.389) / _col0)
+ expr: (-1.389 / _col0)
type: double
expr: _col2
type: bigint
- expr: ((_col0 % (- 75)) / _col2)
+ expr: ((_col0 % -75) / _col2)
type: decimal(30,20)
- expr: (- (_col0 % (- 75)))
+ expr: (- (_col0 % -75))
type: int
expr: _col3
type: double
- expr: (- (- (_col0 % (- 75))))
+ expr: (- (- (_col0 % -75)))
type: int
expr: _col4
type: bigint
- expr: (_col4 - (- 89010))
+ expr: (_col4 - -89010)
type: bigint
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
File Output Operator
@@ -6413,7 +6413,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: ((cdouble > 2563.58) and ((((cbigint >= cint) and ((csmallint < cint) and (cfloat < (- 5638.15)))) or false) or ((cdouble <= cbigint) and ((- 5638.15) > cbigint))))
+ expr: ((cdouble > 2563.58) and (((cbigint >= cint) and ((csmallint < cint) and (cfloat < -5638.15))) or ((cdouble <= cbigint) and (-5638.15 > cbigint))))
type: boolean
Vectorized execution: true
Select Operator
@@ -6488,9 +6488,9 @@
type: double
expr: _col2
type: bigint
- expr: ((2563.58 * _col1) + (- 5638.15))
+ expr: ((2563.58 * _col1) + -5638.15)
type: double
- expr: ((- _col1) * ((2563.58 * _col1) + (- 5638.15)))
+ expr: ((- _col1) * ((2563.58 * _col1) + -5638.15))
type: double
expr: _col3
type: double
@@ -6506,7 +6506,7 @@
type: double
expr: _col6
type: double
- expr: ((- 863.257) % (_col0 * 762))
+ expr: (-863.257 % (_col0 * 762))
type: double
expr: _col6
type: double
@@ -6769,7 +6769,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: ((ctimestamp1 <> 0) and ((((((((- 257) <> ctinyint) and cboolean2 is not null) and ((cstring1 rlike '.*ss') and ((- 10669) < ctimestamp1))) or (ctimestamp2 = (- 10669))) or ((ctimestamp1 < 0) and (cstring2 like '%b%'))) or (cdouble = cint)) or (cboolean1 is null and (cfloat < cint))))
+ expr: ((ctimestamp1 <> 0) and (((((((-257 <> ctinyint) and cboolean2 is not null) and ((cstring1 rlike '.*ss') and (-10669 < ctimestamp1))) or (ctimestamp2 = -10669)) or ((ctimestamp1 < 0) and (cstring2 like '%b%'))) or (cdouble = cint)) or (cboolean1 is null and (cfloat < cint))))
type: boolean
Vectorized execution: true
Select Operator
@@ -6894,23 +6894,23 @@
type: double
expr: (- _col2)
type: double
- expr: ((- 26.28) - _col2)
+ expr: (-26.28 - _col2)
type: double
expr: _col4
type: bigint
expr: (- _col4)
type: bigint
- expr: (((- 26.28) - _col2) * (- _col2))
+ expr: ((-26.28 - _col2) * (- _col2))
type: double
expr: _col5
type: tinyint
- expr: ((((- 26.28) - _col2) * (- _col2)) * (- _col4))
+ expr: (((-26.28 - _col2) * (- _col2)) * (- _col4))
type: double
expr: (- (_col2 * 10.175))
type: double
expr: _col6
type: double
- expr: (_col6 + ((((- 26.28) - _col2) * (- _col2)) * (- _col4)))
+ expr: (_col6 + (((-26.28 - _col2) * (- _col2)) * (- _col4)))
type: double
expr: (- (- _col2))
type: double
@@ -6924,13 +6924,13 @@
type: double
expr: _col9
type: double
- expr: ((_col6 + ((((- 26.28) - _col2) * (- _col2)) * (- _col4))) - ((((- 26.28) - _col2) * (- _col2)) * (- _col4)))
+ expr: ((_col6 + (((-26.28 - _col2) * (- _col2)) * (- _col4))) - (((-26.28 - _col2) * (- _col2)) * (- _col4)))
type: double
expr: (- (- (_col2 * 10.175)))
type: double
expr: _col10
type: double
- expr: (((_col6 + ((((- 26.28) - _col2) * (- _col2)) * (- _col4))) - ((((- 26.28) - _col2) * (- _col2)) * (- _col4))) * 10.175)
+ expr: (((_col6 + (((-26.28 - _col2) * (- _col2)) * (- _col4))) - (((-26.28 - _col2) * (- _col2)) * (- _col4))) * 10.175)
type: double
expr: (10.175 % (10.175 / _col3))
type: double
@@ -6940,23 +6940,23 @@
type: double
expr: _col12
type: double
- expr: (- (((- 26.28) - _col2) * (- _col2)))
+ expr: (- ((-26.28 - _col2) * (- _col2)))
type: double
expr: ((- _col2) % _col10)
type: double
- expr: ((- 26.28) / (- _col5))
+ expr: (-26.28 / (- _col5))
type: double
expr: _col13
type: double
expr: _col14
type: bigint
- expr: ((_col6 + ((((- 26.28) - _col2) * (- _col2)) * (- _col4))) / _col7)
+ expr: ((_col6 + (((-26.28 - _col2) * (- _col2)) * (- _col4))) / _col7)
type: double
expr: (- (- _col4))
type: bigint
expr: _col4
type: bigint
- expr: ((_col6 + ((((- 26.28) - _col2) * (- _col2)) * (- _col4))) % (- 26.28))
+ expr: ((_col6 + (((-26.28 - _col2) * (- _col2)) * (- _col4))) % -26.28)
type: double
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38
File Output Operator
@@ -7427,7 +7427,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: (cboolean1 is not null and (((((cdouble < csmallint) and ((cboolean2 = cboolean1) and (cbigint <= (- 863.257)))) or ((cint >= (- 257)) and (cstring1 is not null and (cboolean1 >= 1)))) or (cstring2 rlike 'b')) or ((csmallint >= ctinyint) and ctimestamp2 is null)))
+ expr: (cboolean1 is not null and (((((cdouble < csmallint) and ((cboolean2 = cboolean1) and (cbigint <= -863.257))) or ((cint >= -257) and (cstring1 is not null and (cboolean1 >= 1)))) or (cstring2 rlike 'b')) or ((csmallint >= ctinyint) and ctimestamp2 is null)))
type: boolean
Vectorized execution: true
Select Operator
@@ -7524,7 +7524,7 @@
type: float
expr: (- _col1)
type: float
- expr: ((- 26.28) / _col1)
+ expr: (-26.28 / _col1)
type: double
expr: _col2
type: bigint
@@ -7556,13 +7556,13 @@
type: bigint
expr: _col8
type: double
- expr: ((- 1.389) * _col5)
+ expr: (-1.389 * _col5)
type: double
- expr: (_col7 - ((- 1.389) * _col5))
+ expr: (_col7 - (-1.389 * _col5))
type: double
expr: _col9
type: double
- expr: (- (_col7 - ((- 1.389) * _col5)))
+ expr: (- (_col7 - (-1.389 * _col5)))
type: double
expr: _col10
type: double
Index: ql/src/test/results/clientpositive/cast1.q.out
===================================================================
--- ql/src/test/results/clientpositive/cast1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/cast1.q.out (working copy)
@@ -35,19 +35,19 @@
type: boolean
Select Operator
expressions:
- expr: (3 + 2)
+ expr: 5
type: int
- expr: (3.0 + 2)
+ expr: 5.0
type: double
- expr: (3 + 2.0)
+ expr: 5.0
type: double
- expr: (3.0 + 2.0)
+ expr: 5.0
type: double
- expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0)))
+ expr: 5
type: int
- expr: UDFToBoolean(1)
+ expr: true
type: boolean
- expr: UDFToInteger(true)
+ expr: 1
type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
File Output Operator
Index: ql/src/test/results/clientpositive/select_unquote_and.q.out
===================================================================
--- ql/src/test/results/clientpositive/select_unquote_and.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/select_unquote_and.q.out (working copy)
@@ -70,7 +70,7 @@
alias: npe_test
Filter Operator
predicate:
- expr: ((ds > ((2012 - 11) - 31)) and (ds < ((2012 - 12) - 15)))
+ expr: ((ds > 1970) and (ds < 1985))
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
===================================================================
--- ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out (working copy)
@@ -448,7 +448,7 @@
alias: orc_pred
Filter Operator
predicate:
- expr: ((t is not null and (t < 0)) and (t > (- 2)))
+ expr: ((t is not null and (t < 0)) and (t > -2))
type: boolean
Select Operator
expressions:
@@ -527,11 +527,11 @@
TableScan
alias: orc_pred
filterExpr:
- expr: ((t is not null and (t < 0)) and (t > (- 2)))
+ expr: ((t is not null and (t < 0)) and (t > -2))
type: boolean
Filter Operator
predicate:
- expr: ((t is not null and (t < 0)) and (t > (- 2)))
+ expr: ((t is not null and (t < 0)) and (t > -2))
type: boolean
Select Operator
expressions:
@@ -671,11 +671,11 @@
alias: orc_pred
Filter Operator
predicate:
- expr: (((t = (- 1)) and s is not null) and (s like 'bob%'))
+ expr: (((t = -1) and s is not null) and (s like 'bob%'))
type: boolean
Select Operator
expressions:
- expr: t
+ expr: -1
type: tinyint
expr: s
type: string
@@ -743,15 +743,15 @@
TableScan
alias: orc_pred
filterExpr:
- expr: (((t = (- 1)) and s is not null) and (s like 'bob%'))
+ expr: (((t = -1) and s is not null) and (s like 'bob%'))
type: boolean
Filter Operator
predicate:
- expr: (((t = (- 1)) and s is not null) and (s like 'bob%'))
+ expr: (((t = -1) and s is not null) and (s like 'bob%'))
type: boolean
Select Operator
expressions:
- expr: t
+ expr: -1
type: tinyint
expr: s
type: string
@@ -886,7 +886,7 @@
alias: orc_pred
Filter Operator
predicate:
- expr: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30)
+ expr: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30)
type: boolean
Select Operator
expressions:
@@ -962,11 +962,11 @@
TableScan
alias: orc_pred
filterExpr:
- expr: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30)
+ expr: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30)
type: boolean
Filter Operator
predicate:
- expr: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30)
+ expr: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30)
type: boolean
Select Operator
expressions:
@@ -1131,7 +1131,7 @@
alias: orc_pred
Filter Operator
predicate:
- expr: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Select Operator
expressions:
@@ -1222,11 +1222,11 @@
TableScan
alias: orc_pred
filterExpr:
- expr: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Filter Operator
predicate:
- expr: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Select Operator
expressions:
@@ -1407,7 +1407,7 @@
alias: orc_pred
Filter Operator
predicate:
- expr: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Select Operator
expressions:
@@ -1534,11 +1534,11 @@
TableScan
alias: orc_pred
filterExpr:
- expr: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Filter Operator
predicate:
- expr: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
+ expr: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400)
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/udf_space.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_space.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_space.q.out (working copy)
@@ -43,15 +43,15 @@
Row Limit Per Split: 1
Select Operator
expressions:
- expr: space(10)
+ expr: ' '
type: string
- expr: space(0)
+ expr: ''
type: string
- expr: space(1)
+ expr: ' '
type: string
- expr: space((- 1))
+ expr: ''
type: string
- expr: space((- 100))
+ expr: ''
type: string
outputColumnNames: _col0, _col1, _col2, _col3, _col4
ListSink
Index: ql/src/test/results/clientpositive/udf4.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf4.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf4.q.out (working copy)
@@ -79,73 +79,73 @@
alias: dest1
Select Operator
expressions:
- expr: round(1.0)
+ expr: 1.0
type: double
- expr: round(1.5)
+ expr: 2.0
type: double
- expr: round((- 1.5))
+ expr: -2.0
type: double
- expr: floor(1.0)
+ expr: 1
type: bigint
- expr: floor(1.5)
+ expr: 1
type: bigint
- expr: floor((- 1.5))
+ expr: -2
type: bigint
- expr: sqrt(1.0)
+ expr: 1.0
type: double
- expr: sqrt((- 1.0))
+ expr: null
+ type: void
+ expr: 0.0
type: double
- expr: sqrt(0.0)
- type: double
- expr: ceil(1.0)
+ expr: 1
type: bigint
- expr: ceil(1.5)
+ expr: 2
type: bigint
- expr: ceil((- 1.5))
+ expr: -1
type: bigint
- expr: ceil(1.0)
+ expr: 1
type: bigint
expr: rand(3)
type: double
expr: 3
type: int
- expr: (- 3)
+ expr: -3
type: int
- expr: (1 + 2)
+ expr: 3
type: int
- expr: (1 + (- 2))
+ expr: -1
type: int
- expr: (~ 1)
+ expr: -2
type: int
- expr: (~ UDFToByte(1))
+ expr: -2
type: tinyint
- expr: (~ UDFToShort(1))
+ expr: -2
type: smallint
- expr: (~ UDFToLong(1))
+ expr: -2
type: bigint
- expr: (UDFToByte(1) & UDFToByte(2))
+ expr: 0
type: tinyint
- expr: (UDFToShort(1) & UDFToShort(2))
+ expr: 0
type: smallint
- expr: (1 & 2)
+ expr: 0
type: int
- expr: (UDFToLong(1) & UDFToLong(2))
+ expr: 0
type: bigint
- expr: (UDFToByte(1) | UDFToByte(2))
+ expr: 3
type: tinyint
- expr: (UDFToShort(1) | UDFToShort(2))
+ expr: 3
type: smallint
- expr: (1 | 2)
+ expr: 3
type: int
- expr: (UDFToLong(1) | UDFToLong(2))
+ expr: 3
type: bigint
- expr: (UDFToByte(1) ^ UDFToByte(3))
+ expr: 2
type: tinyint
- expr: (UDFToShort(1) ^ UDFToShort(3))
+ expr: 2
type: smallint
- expr: (1 ^ 3)
+ expr: 2
type: int
- expr: (UDFToLong(1) ^ UDFToLong(3))
+ expr: 2
type: bigint
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33
File Output Operator
Index: ql/src/test/results/clientpositive/subquery_multiinsert.q.out
===================================================================
--- ql/src/test/results/clientpositive/subquery_multiinsert.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/subquery_multiinsert.q.out (working copy)
@@ -139,25 +139,21 @@
1
handleSkewJoin: false
outputColumnNames: _col0, _col1
- Filter Operator
- predicate:
- expr: (1 = 1)
- type: boolean
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_4
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_4
Stage: Stage-0
Move Operator
@@ -261,7 +257,7 @@
outputColumnNames: _col0, _col1, _col4
Filter Operator
predicate:
- expr: ((1 = 1) and _col4 is null)
+ expr: _col4 is null
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/union_view.q.out
===================================================================
--- ql/src/test/results/clientpositive/union_view.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/union_view.q.out (working copy)
@@ -87,7 +87,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -165,7 +165,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -243,7 +243,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -587,7 +587,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -623,11 +623,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '1'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -659,11 +659,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '1'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -747,11 +747,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '2'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -783,7 +783,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -819,11 +819,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '2'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -907,11 +907,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '3'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -943,11 +943,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '3'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -979,7 +979,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -1071,7 +1071,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -1113,7 +1113,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -1155,7 +1155,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -1365,7 +1365,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '1'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1398,7 +1398,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '1'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1498,7 +1498,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '2'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1560,7 +1560,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '2'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1660,7 +1660,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '3'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1693,7 +1693,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '3'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1825,11 +1825,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '4'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1861,11 +1861,11 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
- expr: ds
+ expr: '4'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -1897,7 +1897,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 86
type: int
expr: value
type: string
@@ -1986,7 +1986,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '4'
type: string
outputColumnNames: _col0, _col1, _col2
Union
@@ -2019,7 +2019,7 @@
type: int
expr: value
type: string
- expr: ds
+ expr: '4'
type: string
outputColumnNames: _col0, _col1, _col2
Union
Index: ql/src/test/results/clientpositive/index_auto_multiple.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_multiple.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_auto_multiple.q.out (working copy)
@@ -120,7 +120,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/groupby_sort_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_sort_1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/groupby_sort_1.q.out (working copy)
@@ -7821,8 +7821,9 @@
Stage-2 is a root stage
Stage-0 depends on stages: Stage-2
Stage-3 depends on stages: Stage-0
- Stage-1 depends on stages: Stage-2
- Stage-4 depends on stages: Stage-1
+ Stage-4 depends on stages: Stage-2
+ Stage-1 depends on stages: Stage-4
+ Stage-5 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-2
@@ -7837,7 +7838,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '8'
type: string
expr: val
type: string
@@ -7850,7 +7851,7 @@
Group By Operator
aggregations:
expr: count(1)
- bucketGroup: true
+ bucketGroup: false
keys:
expr: _col0
type: string
@@ -7884,25 +7885,15 @@
type: string
expr: _col1
type: string
- mode: final
+ mode: hash
outputColumnNames: _col0, _col1, _col2
- Select Operator
- expressions:
- expr: UDFToInteger(_col0)
- type: int
- expr: _col1
- type: string
- expr: UDFToInteger(_col2)
- type: int
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: true
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest2
+ File Output Operator
+ compressed: true
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Reduce Operator Tree:
Group By Operator
aggregations:
@@ -7942,6 +7933,57 @@
Stage: Stage-3
Stats-Aggr Operator
+ Stage: Stage-4
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col2
+ type: bigint
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: count(VALUE._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ expr: UDFToInteger(_col2)
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: true
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest2
+
Stage: Stage-1
Move Operator
tables:
@@ -7952,7 +7994,7 @@
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest2
- Stage: Stage-4
+ Stage: Stage-5
Stats-Aggr Operator
PREHOOK: query: FROM (select key, val from T2 where key = 8) x
Index: ql/src/test/results/clientpositive/index_auto_partitioned.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_partitioned.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_auto_partitioned.q.out (working copy)
@@ -110,7 +110,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (working copy)
@@ -8417,8 +8417,10 @@
Stage-3 depends on stages: Stage-2
Stage-0 depends on stages: Stage-3
Stage-4 depends on stages: Stage-0
- Stage-1 depends on stages: Stage-3
- Stage-5 depends on stages: Stage-1
+ Stage-5 depends on stages: Stage-2
+ Stage-6 depends on stages: Stage-5
+ Stage-1 depends on stages: Stage-6
+ Stage-7 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-2
@@ -8433,7 +8435,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '8'
type: string
expr: val
type: string
@@ -8446,7 +8448,7 @@
Group By Operator
aggregations:
expr: count(1)
- bucketGroup: true
+ bucketGroup: false
keys:
expr: _col0
type: string
@@ -8480,25 +8482,15 @@
type: string
expr: _col1
type: string
- mode: final
+ mode: hash
outputColumnNames: _col0, _col1, _col2
- Select Operator
- expressions:
- expr: UDFToInteger(_col0)
- type: int
- expr: _col1
- type: string
- expr: UDFToInteger(_col2)
- type: int
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: true
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest2
+ File Output Operator
+ compressed: true
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Reduce Operator Tree:
Group By Operator
aggregations:
@@ -8573,6 +8565,96 @@
Stage: Stage-4
Stats-Aggr Operator
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: _col2
+ type: bigint
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: count(VALUE._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: partials
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: true
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col2
+ type: bigint
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: count(VALUE._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: final
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ expr: UDFToInteger(_col2)
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: true
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest2
+
Stage: Stage-1
Move Operator
tables:
@@ -8583,7 +8665,7 @@
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest2
- Stage: Stage-5
+ Stage: Stage-7
Stats-Aggr Operator
PREHOOK: query: FROM (select key, val from T2 where key = 8) x
Index: ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (working copy)
@@ -136,7 +136,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/index_auto_file_format.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_file_format.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_auto_file_format.q.out (working copy)
@@ -96,7 +96,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: value
type: string
@@ -252,7 +252,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/index_auto_empty.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_empty.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/index_auto_empty.q.out (working copy)
@@ -70,7 +70,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '86'
type: string
expr: val
type: string
Index: ql/src/test/results/clientpositive/vectorization_div0.q.out
===================================================================
--- ql/src/test/results/clientpositive/vectorization_div0.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/vectorization_div0.q.out (working copy)
@@ -361,7 +361,7 @@
alias: alltypesorc
Filter Operator
predicate:
- expr: ((cdouble >= (- 500)) and (cdouble < (- 199)))
+ expr: ((cdouble >= -500) and (cdouble < -199))
type: boolean
Vectorized execution: true
Select Operator
Index: ql/src/test/results/clientpositive/type_widening.q.out
===================================================================
--- ql/src/test/results/clientpositive/type_widening.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/type_widening.q.out (working copy)
@@ -19,7 +19,7 @@
alias: src
Select Operator
expressions:
- expr: COALESCE(0,9223372036854775807)
+ expr: 0
type: bigint
outputColumnNames: _col0
Limit
Index: ql/src/test/results/clientpositive/udf_locate.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_locate.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf_locate.q.out (working copy)
@@ -67,31 +67,31 @@
Row Limit Per Split: 1
Select Operator
expressions:
-                  expr: locate('abc', 'abcd')
+ expr: 1
type: int
-                  expr: locate('ccc', 'abcabc')
+ expr: 0
type: int
-                  expr: locate('23', 123)
+ expr: 2
type: int
-                  expr: locate(23, 123)
+ expr: 2
type: int
-                  expr: locate('abc', 'abcabc', 2)
+ expr: 4
type: int
-                  expr: locate('abc', 'abcabc', '2')
+ expr: 4
type: int
-                  expr: locate(1, true)
+ expr: 0
type: int
-                  expr: locate(1, false)
+ expr: 0
type: int
-                  expr: locate(UDFToByte('2'), '12345')
+ expr: 2
type: int
-                  expr: locate('34', UDFToShort('12345'))
+ expr: 3
type: int
-                  expr: locate('456', UDFToLong('123456789012'))
+ expr: 4
type: int
-                  expr: locate('.25', UDFToFloat(1.25))
+ expr: 2
type: int
-                  expr: locate('.0', 16.0)
+ expr: 3
type: int
                   expr: locate(null, 'abc')
type: int
@@ -99,7 +99,7 @@
type: int
                   expr: locate('abc', 'abcd', null)
type: int
-                  expr: locate('abc', 'abcd', 'invalid number')
+ expr: 0
type: int
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
ListSink
Index: ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_25.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/smb_mapjoin_25.q.out (working copy)
@@ -59,16 +59,13 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: 5
type: int
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: 5
type: int
tag: 0
- value expressions:
- expr: key
- type: int
t1:b
TableScan
alias: b
@@ -78,11 +75,11 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: 5
type: int
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: 5
type: int
tag: 1
Reduce Operator Tree:
@@ -90,13 +87,12 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0}
+ 0
1
handleSkewJoin: false
- outputColumnNames: _col0
Select Operator
expressions:
- expr: _col0
+ expr: 5
type: int
outputColumnNames: _col0
File Output Operator
@@ -155,7 +151,7 @@
expressions:
expr: _col0
type: int
- expr: _col1
+ expr: 5
type: int
outputColumnNames: _col0, _col1
File Output Operator
@@ -178,16 +174,13 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: 5
type: int
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: 5
type: int
tag: 0
- value expressions:
- expr: key
- type: int
t2:d
TableScan
alias: d
@@ -197,11 +190,11 @@
type: boolean
Reduce Output Operator
key expressions:
- expr: key
+ expr: 5
type: int
sort order: +
Map-reduce partition columns:
- expr: key
+ expr: 5
type: int
tag: 1
Reduce Operator Tree:
@@ -209,13 +202,12 @@
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0}
+ 0
1
handleSkewJoin: false
- outputColumnNames: _col0
Select Operator
expressions:
- expr: _col0
+ expr: 5
type: int
outputColumnNames: _col0
File Output Operator
@@ -250,85 +242,168 @@
(TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) key) 5)))) t1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME smb_bucket_2) c) (TOK_TABREF (TOK_TABNAME smb_bucket_3) d) (= (. (TOK_TABLE_OR_COL c) key) (. (TOK_TABLE_OR_COL d) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL c) key) 5)))) t2) (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t2) key) 5))))
STAGE DEPENDENCIES:
- Stage-1 is a root stage
+ Stage-11 is a root stage
+ Stage-8 depends on stages: Stage-11
+ Stage-7 depends on stages: Stage-8, Stage-9 , consists of Stage-10, Stage-2
+ Stage-10 has a backup stage: Stage-2
+ Stage-6 depends on stages: Stage-10
+ Stage-2
+ Stage-12 is a root stage
+ Stage-9 depends on stages: Stage-12
Stage-0 is a root stage
STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- t1:a
+ Stage: Stage-11
+ Map Reduce Local Work
+ Alias -> Map Local Tables:
+ t1:b
+ Fetch Operator
+ limit: -1
+ Alias -> Map Local Operator Tree:
+ t1:b
TableScan
- alias: a
+ alias: b
Filter Operator
predicate:
expr: (key = 5)
type: boolean
- Sorted Merge Bucket Map Join Operator
- condition map:
- Inner Join 0 to 1
+ HashTable Sink Operator
condition expressions:
- 0 {key}
+ 0
1
handleSkewJoin: false
keys:
- 0 [Column[key]]
- 1 [Column[key]]
- outputColumnNames: _col0
+ 0 [Const int 5]
+ 1 [Const int 5]
Position of Big Table: 0
- Select Operator
- expressions:
- expr: _col0
- type: int
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: int
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: int
- tag: 0
- value expressions:
- expr: _col0
- type: int
- t2:c
+
+ Stage: Stage-8
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1:a
TableScan
- alias: c
+ alias: a
Filter Operator
predicate:
expr: (key = 5)
type: boolean
- Sorted Merge Bucket Map Join Operator
+ Map Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {key}
+ 0
1
handleSkewJoin: false
keys:
- 0 [Column[key]]
- 1 [Column[key]]
- outputColumnNames: _col0
+ 0 [Const int 5]
+ 1 [Const int 5]
Position of Big Table: 0
Select Operator
expressions:
+ expr: 5
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Local Work:
+ Map Reduce Local Work
+
+ Stage: Stage-7
+ Conditional Operator
+
+ Stage: Stage-10
+ Map Reduce Local Work
+ Alias -> Map Local Tables:
+ $INTNAME1
+ Fetch Operator
+ limit: -1
+ Alias -> Map Local Operator Tree:
+ $INTNAME1
+ TableScan
+ HashTable Sink Operator
+ condition expressions:
+ 0 {_col0}
+ 1 {_col0}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[_col0]]
+ 1 [Column[_col0]]
+ Position of Big Table: 0
+
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ $INTNAME
+ TableScan
+ Map Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {_col0}
+ 1 {_col0}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[_col0]]
+ 1 [Column[_col0]]
+ outputColumnNames: _col0, _col1
+ Position of Big Table: 0
+ Filter Operator
+ predicate:
+ expr: (_col1 = 5)
+ type: boolean
+ Select Operator
+ expressions:
expr: _col0
type: int
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: int
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: int
- tag: 1
- value expressions:
- expr: _col0
- type: int
+ expr: 5
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Local Work:
+ Map Reduce Local Work
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ $INTNAME
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: int
+ tag: 0
+ value expressions:
+ expr: _col0
+ type: int
+ $INTNAME1
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: int
+ tag: 1
+ value expressions:
+ expr: _col0
+ type: int
Reduce Operator Tree:
Join Operator
condition map:
@@ -346,7 +421,7 @@
expressions:
expr: _col0
type: int
- expr: _col1
+ expr: 5
type: int
outputColumnNames: _col0, _col1
File Output Operator
@@ -357,6 +432,66 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Stage: Stage-12
+ Map Reduce Local Work
+ Alias -> Map Local Tables:
+ t2:c
+ Fetch Operator
+ limit: -1
+ Alias -> Map Local Operator Tree:
+ t2:c
+ TableScan
+ alias: c
+ Filter Operator
+ predicate:
+ expr: (key = 5)
+ type: boolean
+ HashTable Sink Operator
+ condition expressions:
+ 0
+ 1
+ handleSkewJoin: false
+ keys:
+ 0 [Const int 5]
+ 1 [Const int 5]
+ Position of Big Table: 1
+
+ Stage: Stage-9
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t2:d
+ TableScan
+ alias: d
+ Filter Operator
+ predicate:
+ expr: (key = 5)
+ type: boolean
+ Map Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0
+ 1
+ handleSkewJoin: false
+ keys:
+ 0 [Const int 5]
+ 1 [Const int 5]
+ Position of Big Table: 1
+ Select Operator
+ expressions:
+ expr: 5
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+ Local Work:
+ Map Reduce Local Work
+
Stage: Stage-0
Fetch Operator
limit: -1
Index: ql/src/test/results/clientpositive/groupby_ppd.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppd.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/groupby_ppd.q.out (working copy)
@@ -29,7 +29,7 @@
type: boolean
Select Operator
expressions:
- expr: bar
+ expr: 1
type: int
expr: foo
type: int
@@ -73,7 +73,7 @@
type: boolean
Select Operator
expressions:
- expr: bar
+ expr: 1
type: int
expr: foo
type: int
Index: ql/src/test/results/clientpositive/macro.q.out
===================================================================
--- ql/src/test/results/clientpositive/macro.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/macro.q.out (working copy)
@@ -30,7 +30,7 @@
alias: src
Select Operator
expressions:
- expr: SIGMOID(2)
+ expr: 0.8807970779778823
type: double
outputColumnNames: _col0
Limit
@@ -58,7 +58,7 @@
GatherStats: false
Select Operator
expressions:
- expr: SIGMOID(2)
+ expr: 0.8807970779778823
type: double
outputColumnNames: _col0
Statistics:
@@ -104,7 +104,7 @@
alias: src
Select Operator
expressions:
- expr: (FIXED_NUMBER() + 1)
+ expr: 2
type: int
outputColumnNames: _col0
Limit
@@ -132,7 +132,7 @@
GatherStats: false
Select Operator
expressions:
- expr: (FIXED_NUMBER() + 1)
+ expr: 2
type: int
outputColumnNames: _col0
Statistics:
@@ -199,7 +199,7 @@
alias: src
Select Operator
expressions:
- expr: SIMPLE_ADD(1, 9)
+ expr: 10
type: int
outputColumnNames: _col0
Limit
@@ -227,7 +227,7 @@
GatherStats: false
Select Operator
expressions:
- expr: SIMPLE_ADD(1, 9)
+ expr: 10
type: int
outputColumnNames: _col0
Statistics:
Index: ql/src/test/results/clientpositive/lateral_view_ppd.q.out
===================================================================
--- ql/src/test/results/clientpositive/lateral_view_ppd.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/lateral_view_ppd.q.out (working copy)
@@ -112,16 +112,14 @@
Lateral View Forward
Select Operator
expressions:
- expr: key
- type: string
expr: value
type: string
- outputColumnNames: key, value
+ outputColumnNames: value
Lateral View Join Operator
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1, _col4
Filter Operator
predicate:
- expr: ((_col0 = '0') and (_col4 = 1))
+ expr: (_col4 = 1)
type: boolean
Select Operator
expressions:
@@ -145,10 +143,10 @@
UDTF Operator
function name: explode
Lateral View Join Operator
- outputColumnNames: _col0, _col1, _col4
+ outputColumnNames: _col1, _col4
Filter Operator
predicate:
- expr: ((_col0 = '0') and (_col4 = 1))
+ expr: (_col4 = 1)
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/udf6.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf6.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf6.q.out (working copy)
@@ -35,7 +35,7 @@
alias: dest1
Select Operator
expressions:
- expr: if(true, 1, 2)
+ expr: 1
type: int
outputColumnNames: _col0
File Output Operator
@@ -93,21 +93,21 @@
alias: dest1
Select Operator
expressions:
- expr: if(true, 1, 2)
+ expr: 1
type: int
- expr: if(false, 1, 2)
+ expr: 2
type: int
expr: if(null, 1, 2)
type: int
- expr: if(true, 'a', 'b')
+ expr: 'a'
type: string
- expr: if(true, 0.1, 0.2)
+ expr: 0.1
type: double
- expr: if(false, UDFToLong(1), UDFToLong(2))
+ expr: 2
type: bigint
- expr: if(false, UDFToByte(127), UDFToByte(126))
+ expr: 126
type: tinyint
- expr: if(false, UDFToShort(127), UDFToShort(128))
+ expr: 128
type: smallint
expr: 128
type: int
Index: ql/src/test/results/clientpositive/udf1.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf1.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/udf1.q.out (working copy)
@@ -55,45 +55,45 @@
type: boolean
Select Operator
expressions:
- expr: ('a' like '%a%')
+ expr: true
type: boolean
- expr: ('b' like '%a%')
+ expr: false
type: boolean
- expr: ('ab' like '%a%')
+ expr: true
type: boolean
- expr: ('ab' like '%a_')
+ expr: true
type: boolean
- expr: ('%_' like '\%\_')
+ expr: true
type: boolean
- expr: ('ab' like '\%\_')
+ expr: false
type: boolean
- expr: ('ab' like '_a%')
+ expr: false
type: boolean
- expr: ('ab' like 'a')
+ expr: false
type: boolean
- expr: ('' rlike '.*')
+ expr: true
type: boolean
- expr: ('a' rlike '[ab]')
+ expr: true
type: boolean
- expr: ('' rlike '[ab]')
+ expr: false
type: boolean
- expr: ('hadoop' rlike '[a-z]*')
+ expr: true
type: boolean
- expr: ('hadoop' rlike 'o*')
+ expr: true
type: boolean
- expr: regexp_replace('abc', 'b', 'c')
+ expr: 'acc'
type: string
- expr: regexp_replace('abc', 'z', 'a')
+ expr: 'abc'
type: string
- expr: regexp_replace('abbbb', 'bb', 'b')
+ expr: 'abb'
type: string
- expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive')
+ expr: 'hive'
type: string
- expr: regexp_replace('hadoopAAA', 'A.*', '')
+ expr: 'hadoop'
type: string
- expr: regexp_replace('abc', '', 'A')
+ expr: 'AaAbAcA'
type: string
- expr: ('abc' rlike '')
+ expr: false
type: boolean
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
File Output Operator
Index: ql/src/test/results/clientpositive/set_variable_sub.q.out
===================================================================
--- ql/src/test/results/clientpositive/set_variable_sub.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/set_variable_sub.q.out (working copy)
@@ -22,7 +22,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 'value1'
type: string
expr: value
type: string
@@ -63,7 +63,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 'value1'
type: string
expr: value
type: string
@@ -104,7 +104,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: '1'
type: string
expr: value
type: string
Index: ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out
===================================================================
--- ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out (revision 1558643)
+++ ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out (working copy)
@@ -41,7 +41,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 'no_such_value'
type: string
expr: value
type: string
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java (revision 0)
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
+import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.Rule;
+import org.apache.hadoop.hive.ql.lib.RuleRegExp;
+import org.apache.hadoop.hive.ql.parse.OpParseContext;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+
+/**
+ * Implementation of one of the rule-based optimization steps. ConstantPropagate traverses the DAG
+ * from root to child. For each conditional expression, it proceeds as follows:
+ *
+ * 1. Fold constant expression: if the expression is a UDF and all parameters are constant.
+ *
+ * 2. Shortcut expression: if the expression is a logical operator and it can be shortcut by
+ * some constants of its parameters.
+ *
+ * 3. Propagate expression: if the expression is an assignment like column=constant, the expression
+ * will be propagated to parents to see if further folding is possible.
+ */
+public class ConstantPropagate implements Transform {
+
+ private static final Log LOG = LogFactory.getLog(ConstantPropagate.class);
+ protected ParseContext pGraphContext;
+ private Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
+
+ public ConstantPropagate() {
+ }
+
+ /**
+ * Transform the query tree.
+ *
+ * @param pactx
+ * the current parse context
+ */
+ public ParseContext transform(ParseContext pactx) throws SemanticException {
+ pGraphContext = pactx;
+ opToParseCtxMap = pGraphContext.getOpParseCtx();
+
+ // create the processor context that records the constants propagated to each operator
+ ConstantPropagateProcCtx cppCtx = new ConstantPropagateProcCtx(opToParseCtxMap);
+
+ // create a walker which walks the tree in a DFS manner while maintaining
+ // the operator stack. The dispatcher
+ // generates the plan from the operator tree
+ Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
+ opRules.put(new RuleRegExp("R1", "FIL%"), ConstantPropagateProcFactory
+ .getFilterProc());
+ opRules.put(new RuleRegExp("R2", "GBY%"), ConstantPropagateProcFactory
+ .getGroupByProc());
+ opRules.put(new RuleRegExp("R3", "SEL%"), ConstantPropagateProcFactory
+ .getSelectProc());
+ opRules.put(new RuleRegExp("R4", "FS%"), ConstantPropagateProcFactory
+ .getFileSinkProc());
+ opRules.put(new RuleRegExp("R5", "UNION%"), ConstantPropagateProcFactory
+ .getUnionProc());
+ opRules.put(new RuleRegExp("R5", "RS%"), ConstantPropagateProcFactory
+ .getReduceSinkProc());
+
+ // The dispatcher fires the processor corresponding to the closest matching
+ // rule and passes the context along
+ Dispatcher disp = new DefaultRuleDispatcher(ConstantPropagateProcFactory
+ .getDefaultProc(), opRules, cppCtx);
+ GraphWalker ogw = new ConstantPropagateWalker(disp);
+
+ // Create a list of operator nodes to start the walking.
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.addAll(pGraphContext.getTopOps().values());
+ ogw.startWalking(topNodes, null);
+ for (Operator<? extends Serializable> opToDelete : cppCtx.getOpToDelete()) {
+ if (opToDelete.getParentOperators() == null || opToDelete.getParentOperators().size() != 1) {
+ throw new RuntimeException("Error pruning operator " + opToDelete
+ + ". It should have only 1 parent.");
+ }
+ opToDelete.getParentOperators().get(0).removeChildAndAdoptItsChildren(opToDelete);
+ }
+ return pGraphContext;
+ }
+
+
+ /**
+ * Walks the op tree in post-order.
+ */
+ public static class ConstantPropagateWalker extends DefaultGraphWalker {
+
+ public ConstantPropagateWalker(Dispatcher disp) {
+ super(disp);
+ }
+
+ public void walk(Node nd) throws SemanticException {
+
+ List parents = ((Operator) nd).getParentOperators();
+ if ((parents == null)
+ || getDispatchedList().containsAll(parents)) {
+ opStack.push(nd);
+
+ // all children are done or no need to walk the children
+ dispatch(nd, opStack);
+ opStack.pop();
+ } else {
+ getToWalk().removeAll(parents);
+ getToWalk().add(0, nd);
+ getToWalk().addAll(0, parents);
+ return;
+ }
+
+ // move all the children to the front of queue
+ List<? extends Node> children = nd.getChildren();
+ if (children != null) {
+ getToWalk().removeAll(children);
+ getToWalk().addAll(children);
+ }
+ }
+ }
+
+}
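[Reviewer note, not part of the patch] The class comment above names three steps: fold a constant expression, shortcut a logical operator, and propagate an assignment. Below is a minimal, self-contained Java sketch of the same idea on the toy predicate "key = 8 AND flag". Every name in it (ConstantFoldSketch, foldEquals, shortcutAnd) is a hypothetical illustration; the real optimizer works on ExprNodeDesc trees through ConstantPropagateProcFactory.

import java.util.HashMap;
import java.util.Map;

public class ConstantFoldSketch {

  public static void main(String[] args) {
    // Step 3 (propagate): an upstream "WHERE key = 8" pins the column 'key' to the constant 8.
    Map<String, Object> constants = new HashMap<String, Object>();
    constants.put("key", Integer.valueOf(8));

    // Step 1 (fold): "key = 8" now has only constant operands and is evaluated at compile time.
    Object left = foldEquals(constants.get("key"), Integer.valueOf(8)); // Boolean.TRUE

    // Step 2 (shortcut): "TRUE AND flag" reduces to "flag" without evaluating the other side.
    Object folded = shortcutAnd(left, "flag");

    System.out.println(folded); // prints: flag
  }

  // A deterministic function whose operands are all constants folds to a constant result.
  static Object foldEquals(Object l, Object r) {
    return (l != null && r != null) ? Boolean.valueOf(l.equals(r)) : null;
  }

  // AND is shortcut as soon as one operand is a constant boolean.
  static Object shortcutAnd(Object a, Object b) {
    if (a instanceof Boolean) {
      return ((Boolean) a).booleanValue() ? b : Boolean.FALSE;
    }
    if (b instanceof Boolean) {
      return ((Boolean) b).booleanValue() ? a : Boolean.FALSE;
    }
    return a + " and " + b;
  }
}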
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java (revision 0)
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.parse.OpParseContext;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+
+/**
+ * This class implements the processor context for Constant Propagate.
+ *
+ * ConstantPropagateProcCtx keeps track of propagated constants in a column->const map for each
+ * operator, enabling constants to be resolved across operators.
+ */
+public class ConstantPropagateProcCtx implements NodeProcessorCtx {
+
+ private static final org.apache.commons.logging.Log LOG = LogFactory
+ .getLog(ConstantPropagateProcCtx.class);
+
+ private final Map<Operator<? extends Serializable>, Map<ColumnInfo, ExprNodeDesc>> opToConstantExprs;
+
+ private final Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
+
+ private final List<Operator<? extends Serializable>> opToDelete;
+
+ public ConstantPropagateProcCtx(
+ Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap) {
+ opToConstantExprs = new HashMap<Operator<? extends Serializable>, Map<ColumnInfo, ExprNodeDesc>>();
+ opToDelete = new ArrayList<Operator<? extends Serializable>>();
+ this.opToParseCtxMap = opToParseCtxMap;
+ }
+
+ public Map<Operator<? extends Serializable>, Map<ColumnInfo, ExprNodeDesc>> getOpToConstantExprs() {
+ return opToConstantExprs;
+ }
+
+ public Map<Operator<? extends OperatorDesc>, OpParseContext> getOpToParseCtxMap() {
+ return opToParseCtxMap;
+ }
+
+ /**
+ * Get propagated constant map from parents.
+ *
+ * Traverse all parents of the current operator; if a parent carries a propagated constant
+ * (determined by an assignment expression like column=constant value), resolve the column using
+ * the RowResolver and add it to the current constant map.
+ *
+ * @param op
+ * operator getting the propagated constants.
+ * @return map of ColumnInfo to ExprNodeDesc. The values of that map must be either
+ * ExprNodeConstantDesc or ExprNodeNullDesc.
+ */
+ public Map<ColumnInfo, ExprNodeDesc> getPropagatedConstants(
+ Operator<? extends Serializable> op) {
+ Map<ColumnInfo, ExprNodeDesc> constants = new HashMap<ColumnInfo, ExprNodeDesc>();
+ RowResolver rr = opToParseCtxMap.get(op).getRowResolver();
+ LOG.debug("Current rr of op:" + op + " is " + rr);
+ try {
+ if (op.getParentOperators() == null) {
+ return constants;
+ }
+ for (Operator<? extends Serializable> parent : op.getParentOperators()) {
+ RowResolver inputRR = opToParseCtxMap.get(parent).getRowResolver();
+ Map<ColumnInfo, ExprNodeDesc> c = opToConstantExprs.get(parent);
+ for (Entry<ColumnInfo, ExprNodeDesc> e : c.entrySet()) {
+ ColumnInfo ci = e.getKey();
+
+ // Get genuine column alias (instead of retrieve from ColumnInfo, which is unreliable).
+ String tmp[] = inputRR.reverseLookup(ci.getInternalName());
+
+ // In some cases reverseLookup by internal name will fail (e.g., items insert by
+ // RowResolver.putExpression)
+ if (tmp != null) {
+ ColumnInfo nci = rr.get(tmp[0], tmp[1]);
+ if (nci != null) {
+ constants.put(nci, e.getValue());
+ }
+ }
+ }
+ }
+ LOG.debug("Offerring constants " + StringUtils.join(constants.keySet(), ",")
+ + " to operator " + op.toString());
+ return constants;
+ } catch (SemanticException e) {
+ LOG.error(e.getMessage(), e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Get ColumnInfo from column expression.
+ *
+ * @param op
+ * @param desc
+ * @return
+ */
+ public ColumnInfo resolveColumn(Operator<? extends Serializable> op,
+ ExprNodeColumnDesc desc) {
+ try {
+ ColumnInfo ci = null;
+ for (Operator<? extends Serializable> parent : op.getParentOperators()) {
+ RowResolver rr = opToParseCtxMap.get(parent).getRowResolver();
+ String[] tmp = rr.reverseLookup(desc.getColumn());
+ if (tmp == null) {
+ LOG.debug("Reverse look up of column " + desc + " error!");
+ return null;
+ }
+ ci = rr.get(tmp[0], tmp[1]);
+ return ci;
+ }
+ return null;
+ } catch (SemanticException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void addOpToDelete(Operator<? extends Serializable> op) {
+ opToDelete.add(op);
+ }
+
+ public List<Operator<? extends Serializable>> getOpToDelete() {
+ return opToDelete;
+ }
+}
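[Reviewer note, not part of the patch] getPropagatedConstants above pulls each parent's column->constant map, translates the column names into the child's namespace via the RowResolver, and silently drops columns the child cannot resolve. A minimal sketch of that merge follows, with plain maps standing in for the RowResolver lookups; all names in it (PropagatedConstantsSketch, propagateFromParents) are hypothetical.

import java.util.HashMap;
import java.util.Map;

public class PropagatedConstantsSketch {

  // parentConstants: the parent's column -> constant map.
  // parentToChildName: how the child refers to each parent column (the reverseLookup/rr.get step).
  static Map<String, Object> propagateFromParents(
      Map<String, Object> parentConstants, Map<String, String> parentToChildName) {
    Map<String, Object> childConstants = new HashMap<String, Object>();
    for (Map.Entry<String, Object> e : parentConstants.entrySet()) {
      String childName = parentToChildName.get(e.getKey());
      if (childName != null) { // unresolvable columns are dropped, mirroring the null check above
        childConstants.put(childName, e.getValue());
      }
    }
    return childConstants;
  }

  public static void main(String[] args) {
    Map<String, Object> parentConstants = new HashMap<String, Object>();
    parentConstants.put("key", "86"); // pinned by an upstream "WHERE key = '86'"

    Map<String, String> rename = new HashMap<String, String>();
    rename.put("key", "_col0"); // the child's internal name for the same column

    System.out.println(propagateFromParents(parentConstants, rename)); // prints: {_col0=86}
  }
}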
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 1558643)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy)
@@ -395,6 +395,9 @@
for (String childCol : childJoinCols) {
ExprNodeDesc desc = exprMap.get(childCol);
int index = conf.getValueCols().indexOf(desc);
+ if (index < 0) {
+ continue;
+ }
flags[index] = true;
String[] nm = redSinkRR.reverseLookup(childCol);
if (nm != null) {
@@ -557,7 +560,6 @@
Object... nodeOutputs) throws SemanticException {
SelectOperator op = (SelectOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
-
LateralViewJoinOperator lvJoin = null;
if (op.getChildOperators() != null) {
         for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
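[Reviewer note, not part of the patch] The guard added in the hunk above covers the case where constant propagation has already folded a child's column away, so the expression no longer appears among the ReduceSink value columns and indexOf returns -1. A tiny hypothetical sketch of that situation:

import java.util.Arrays;
import java.util.List;

public class IndexGuardSketch {
  public static void main(String[] args) {
    List<String> valueCols = Arrays.asList("val"); // 'key' was folded to a constant and removed
    boolean[] flags = new boolean[valueCols.size()];
    int index = valueCols.indexOf("key"); // -1: the column is gone
    if (index < 0) {
      System.out.println("skip: column already folded to a constant");
    } else {
      flags[index] = true;
    }
  }
}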
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (revision 0)
@@ -0,0 +1,766 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UnionOperator;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
+import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * Factory for generating the different node processors used by ConstantPropagate.
+ */
+public final class ConstantPropagateProcFactory {
+ protected static final Log LOG = LogFactory.getLog(ConstantPropagateProcFactory.class.getName());
+ protected static Set<Class<?>> propagatableUdfs = new HashSet<Class<?>>();
+
+ static {
+ propagatableUdfs.add(GenericUDFOPAnd.class);
+ };
+
+
+ private ConstantPropagateProcFactory() {
+ // prevent instantiation
+ }
+
+ /**
+ * Cast type from expression type to expected type ti.
+ *
+ * @param desc
+ * constant expression
+ * @param ti
+ * expected type info
+ * @return cast constant, or null if the type cast failed.
+ */
+ private static ExprNodeConstantDesc typeCast(ExprNodeDesc desc, TypeInfo ti) {
+ LOG.debug("Casting " + desc + " to type " + ti);
+ PrimitiveTypeInfo priti = (PrimitiveTypeInfo) ti;
+ if (priti.getPrimitiveCategory() == PrimitiveCategory.DECIMAL
+ || priti.getPrimitiveCategory() == PrimitiveCategory.VARCHAR
+ || priti.getPrimitiveCategory() == PrimitiveCategory.CHAR) {
+
+ // FIXME: support template types. It currently has conflict with ExprNodeConstantDesc
+ return null;
+ }
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) desc;
+ ObjectInspector origOI = TypeInfoUtils
+ .getStandardJavaObjectInspectorFromTypeInfo(desc.getTypeInfo());
+ ObjectInspector oi = TypeInfoUtils
+ .getStandardJavaObjectInspectorFromTypeInfo(ti);
+ Converter converter = ObjectInspectorConverters.getConverter(origOI, oi);
+ Object convObj = converter.convert(c.getValue());
+
+ // Convert integer related types because converters are not sufficient
+ if (convObj instanceof Integer) {
+ switch (priti.getPrimitiveCategory()) {
+ case BYTE:
+ convObj = new Byte((byte) (((Integer) convObj).intValue()));
+ break;
+ case SHORT:
+ convObj = new Short((short) ((Integer) convObj).intValue());
+ break;
+ case LONG:
+ convObj = new Long(((Integer) convObj).intValue());
+ default:
+ }
+ }
+ return new ExprNodeConstantDesc(ti, convObj);
+ }
+
+ /**
+ * Fold input expression desc.
+ *
+ * If desc is a UDF and all parameters are constants, evaluate it. If desc is a column expression,
+ * find it from propagated constants, and if there is, replace it with constant.
+ *
+ * @param desc
+ * folding expression
+ * @param constants
+ * current propagated constant map
+ * @param cppCtx
+ * @param op
+ * processing operator
+ * @param propagate
+ * if true, assignment expressions will be added to constants.
+ * @return fold expression
+ */
+ public static ExprNodeDesc foldExpr(ExprNodeDesc desc,
+ Map<ColumnInfo, ExprNodeDesc> constants,
+ ConstantPropagateProcCtx cppCtx, Operator<? extends Serializable> op, boolean propagate) {
+ if (desc instanceof ExprNodeGenericFuncDesc) {
+ ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) desc;
+
+ // The function must be deterministic, or we can't fold it.
+ GenericUDF udf = funcDesc.getGenericUDF();
+ if (!isDeterministicUdf(udf)) {
+ LOG.debug("Function " + udf.getClass() + " undeterministic, quit folding.");
+ return desc;
+ }
+
+ boolean propagateNext = propagate;
+ if (!propagatableUdfs.contains(udf.getClass())) {
+ propagateNext = false;
+ }
+ List<ExprNodeDesc> newExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc childExpr : desc.getChildren()) {
+ newExprs.add(foldExpr(childExpr, constants, cppCtx, op, propagateNext));
+ }
+
+ // If all child expressions are constants, evaluate UDF immediately
+ ExprNodeDesc constant = evaluateFunction(udf, newExprs, desc.getChildren());
+ if (constant != null) {
+ LOG.debug("Folding expression:" + desc + " -> " + constant);
+ return constant;
+ } else {
+
+ // Check if the function can be short cut.
+ ExprNodeDesc shortcut = shortcutFunction(udf, newExprs);
+ if (shortcut != null) {
+ LOG.debug("Folding expression:" + desc + " -> " + shortcut);
+ return shortcut;
+ }
+ ((ExprNodeGenericFuncDesc) desc).setChildren(newExprs);
+ }
+
+ // For selected binary operators (=, is null, etc.), if one of the expressions is a
+ // constant, add the assignment to the constants map as a half-deterministic column.
+ if (propagate) {
+ propagate(udf, newExprs, cppCtx, op, constants);
+ }
+
+ return desc;
+ } else if (desc instanceof ExprNodeColumnDesc) {
+ ExprNodeDesc col = evaluateColumn((ExprNodeColumnDesc) desc, cppCtx, op);
+ LOG.debug("Folding expression:" + desc + " -> " + col);
+ return col;
+ } else {
+ return desc;
+ }
+ }
+
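+ /**
+ * Check whether a UDF can be evaluated at compile time: it must be annotated as deterministic
+ * and must not require additional files or jars at runtime.
+ */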
+ private static boolean isDeterministicUdf(GenericUDF udf) {
+ UDFType udfType = udf.getClass().getAnnotation(UDFType.class);
+ if (udf instanceof GenericUDFBridge) {
+ udfType = ((GenericUDFBridge) udf).getUdfClass().getAnnotation(UDFType.class);
+ }
+ if (udfType.deterministic() == false) {
+ return false;
+ }
+
+ // If udf is requiring additional jars, we can't determine the result in compile time.
+ String[] files;
+ String[] jars;
+ if (udf instanceof GenericUDFBridge) {
+ GenericUDFBridge bridge = (GenericUDFBridge) udf;
+ String udfClassName = bridge.getUdfClassName();
+ try {
+ UDF udfInternal = (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader()).newInstance();
+ files = udfInternal.getRequiredFiles();
+ jars = udf.getRequiredJars();
+ } catch (Exception e) {
+ LOG.error(
+ "The UDF implementation class '" + udfClassName
+ + "' is not present in the class path");
+ return false;
+ }
+ } else {
+ files = udf.getRequiredFiles();
+ jars = udf.getRequiredJars();
+ }
+ if (files != null || jars != null) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Propagate assignment expression, adding an entry into constant map constants.
+ *
+ * @param udf
+ * expression UDF, currently only 2 UDFs are supported: '=' and 'is null'.
+ * @param newExprs
+ * child expressions (parameters).
+ * @param cppCtx
+ * @param op
+ * @param constants
+ */
+ private static void propagate(GenericUDF udf, List<ExprNodeDesc> newExprs,
+ ConstantPropagateProcCtx cppCtx, Operator<? extends Serializable> op,
+ Map<ColumnInfo, ExprNodeDesc> constants) {
+ if (udf instanceof GenericUDFOPEqual) {
+ ExprNodeDesc lOperand = newExprs.get(0);
+ ExprNodeDesc rOperand = newExprs.get(1);
+ if (lOperand instanceof ExprNodeColumnDesc && rOperand instanceof ExprNodeConstantDesc) {
+ LOG.debug("Filter " + udf
+ + " is identified as a value assignment, propagate it.");
+ ExprNodeColumnDesc c = (ExprNodeColumnDesc) lOperand;
+ ExprNodeConstantDesc v = (ExprNodeConstantDesc) rOperand;
+ ColumnInfo ci = cppCtx.resolveColumn(op, c);
+ if (ci != null) {
+ constants.put(ci, v);
+ }
+ }
+ } else if (udf instanceof GenericUDFOPNull) {
+ ExprNodeDesc operand = newExprs.get(0);
+ if (operand instanceof ExprNodeColumnDesc) {
+ LOG.debug("Filter " + udf
+ + " is identified as a value assignment, propagate it.");
+ ExprNodeColumnDesc c = (ExprNodeColumnDesc) operand;
+ ColumnInfo ci = cppCtx.resolveColumn(op, c);
+ if (ci != null) {
+ constants.put(ci, new ExprNodeNullDesc());
+ }
+ }
+ }
+ }
+
+ private static ExprNodeDesc shortcutFunction(GenericUDF udf, List<ExprNodeDesc> newExprs) {
+ if (udf instanceof GenericUDFOPAnd) {
+ for (int i = 0; i < 2; i++) {
+ ExprNodeDesc childExpr = newExprs.get(i);
+ if (childExpr instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
+ if (c.getValue() == Boolean.TRUE) {
+
+ // if true, prune it
+ return newExprs.get(Math.abs(i - 1));
+ } else {
+
+ // if false return false
+ return childExpr;
+ }
+ }
+ }
+ }
+
+ if (udf instanceof GenericUDFOPOr) {
+ for (int i = 0; i < 2; i++) {
+ ExprNodeDesc childExpr = newExprs.get(i);
+ if (childExpr instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
+ if (c.getValue() == Boolean.FALSE) {
+
+ // if false, prune it
+ return newExprs.get(Math.abs(i - 1));
+ } else {
+
+ // if true return true
+ return childExpr;
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Evaluate column, replace the deterministic columns with constants if possible
+ *
+ * @param desc
+ * @param cppCtx
+ * @param op
+ * @return
+ */
+ private static ExprNodeDesc evaluateColumn(ExprNodeColumnDesc desc,
+ ConstantPropagateProcCtx cppCtx, Operator<? extends Serializable> op) {
+ try {
+ ColumnInfo ci = null;
+ for (Operator<? extends Serializable> parent : op.getParentOperators()) {
+ RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver();
+ String[] tmp = rr.reverseLookup(desc.getColumn());
+ if (tmp == null) {
+ LOG.error("Reverse look up of column " + desc + " error!");
+ return desc;
+ }
+ ci = rr.get(tmp[0], tmp[1]);
+ if (ci != null) {
+ ExprNodeDesc constant = cppCtx.getOpToConstantExprs().get(parent).get(ci);
+ if (constant != null) {
+ LOG.debug("Fold column " + ci + " to be " + constant);
+ if (constant instanceof ExprNodeConstantDesc
+ && !constant.getTypeInfo().equals(desc.getTypeInfo())) {
+ ExprNodeDesc expr = typeCast(constant, desc.getTypeInfo());
+ if (expr == null) {
+ return desc;
+ } else {
+ return expr;
+ }
+ }
+ return constant;
+ } else {
+ return desc;
+ }
+ }
+ }
+ LOG.error("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn());
+ throw new RuntimeException("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn());
+ } catch (SemanticException e) {
+ throw new RuntimeException(e);
+ }
+
+ }
+
+ /**
+ * Evaluate UDF
+ *
+ * @param udf
+ * UDF object
+ * @param exprs
+ * @param oldExprs
+ * @return null if expression cannot be evaluated (not all parameters are constants). Or evaluated
+ * ExprNodeConstantDesc if possible.
+ * @throws HiveException
+ */
+ private static ExprNodeDesc evaluateFunction(GenericUDF udf, List<ExprNodeDesc> exprs,
+ List<ExprNodeDesc> oldExprs) {
+ DeferredJavaObject[] arguments = new DeferredJavaObject[exprs.size()];
+ ObjectInspector[] argois = new ObjectInspector[exprs.size()];
+ for (int i = 0; i < exprs.size(); i++) {
+ ExprNodeDesc desc = exprs.get(i);
+ if (desc instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc constant = (ExprNodeConstantDesc) exprs.get(i);
+ if (!constant.getTypeInfo().equals(oldExprs.get(i).getTypeInfo())) {
+ constant = typeCast(constant, oldExprs.get(i).getTypeInfo());
+ if (constant == null) {
+ return null;
+ }
+ }
+ Object value = constant.getValue();
+ PrimitiveTypeInfo pti = (PrimitiveTypeInfo) constant.getTypeInfo();
+ Object writableValue = PrimitiveObjectInspectorFactory
+ .getPrimitiveJavaObjectInspector(pti).getPrimitiveWritableObject(
+ value);
+ arguments[i] = new DeferredJavaObject(writableValue);
+ argois[i] = ObjectInspectorUtils.getConstantObjectInspector(
+ constant.getWritableObjectInspector(), writableValue);
+ } else if (desc instanceof ExprNodeNullDesc) {
+
+ // FIXME: add null support.
+ return null;
+ } else {
+ return null;
+ }
+ }
+
+ try {
+ ObjectInspector oi = udf.initialize(argois);
+ Object o = udf.evaluate(arguments);
+ LOG.debug(udf.getClass().getName() + "(" + exprs + ")=" + o);
+ if (o == null) {
+ return new ExprNodeNullDesc();
+ }
+ Class<?> clz = o.getClass();
+ if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) {
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
+ TypeInfo typeInfo = poi.getTypeInfo();
+
+ // Handling parameterized types (varchar, decimal, etc).
+ if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME)
+ || typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME)
+ || typeInfo.getTypeName().contains(serdeConstants.CHAR_TYPE_NAME)) {
+
+ // Do not support parameterized types.
+ return null;
+ }
+ o = poi.getPrimitiveJavaObject(o);
+ } else if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(clz)) {
+
+ } else {
+ LOG.error("Unable to evaluate " + udf + ". Return value unrecoginizable.");
+ return null;
+ }
+ return new ExprNodeConstantDesc(o);
+ } catch (HiveException e) {
+ LOG.error("Evaluation function " + udf.getClass()
+ + " failed in Constant Propagatation Optimizer.");
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Update the operator's row schema: if a column has been folded to a constant, use the constant's
+ * object inspector for that column.
+ *
+ * @param op
+ * @param constants
+ * @throws SemanticException
+ */
+ private static void foldOperator(
+ Operator<? extends Serializable> op,
+ Map<ColumnInfo, ExprNodeDesc> constants)
+ throws SemanticException {
+ RowSchema schema = op.getSchema();
+ if (schema != null) {
+ ArrayList<ColumnInfo> cols = schema.getSignature();
+ for (int i = 0; i < cols.size(); i++) {
+ ColumnInfo col = cols.get(i);
+ ExprNodeDesc constant = constants.get(col);
+ if (constant != null) {
+ LOG.debug("Replacing column " + col + " with constant "
+ + constant.getExprString() + " in " + op);
+ col.setObjectinspector(constant.getWritableObjectInspector());
+ }
+ }
+ }
+ }
+
+ /**
+ * Node Processor for Constant Propagation on Filter Operators.
+ * The processor is to fold conditional expressions and extract assignment expressions and
+ * propagate them.
+ */
+ public static class ConstantPropagateFilterProc implements NodeProcessor {
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ FilterOperator op = (FilterOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ ExprNodeDesc condn = op.getConf().getPredicate();
+ // Fold constants
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ LOG.debug("Old filter conditions:" + condn.getExprString());
+ ExprNodeDesc newCondn = foldExpr(condn, constants, cppCtx, op, true);
+ if (newCondn instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) newCondn;
+ if (c.getValue() == Boolean.TRUE) {
+ cppCtx.addOpToDelete(op);
+ LOG.debug("Filter expression " + condn + " holds true. Will delete it.");
+ } else if (c.getValue() == Boolean.FALSE) {
+ LOG.warn("Filter expression " + condn + " holds false!");
+ }
+ }
+ LOG.debug("New filter conditions:" + newCondn.getExprString());
+
+ // merge it with the downstream col list
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ op.getConf().setPredicate(newCondn);
+ foldOperator(op, constants);
+ return null;
+ }
+
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateFilterProc class.
+ *
+ * @return ConstantPropagateFilterProc
+ */
+ public static ConstantPropagateFilterProc getFilterProc() {
+ return new ConstantPropagateFilterProc();
+ }
+
+ /**
+ * Node Processor for Constant Propagate for Group By Operators.
+ */
+ public static class ConstantPropagateGroupByProc implements NodeProcessor {
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ GroupByOperator op = (GroupByOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ GroupByDesc conf = op.getConf();
+ ArrayList<ExprNodeDesc> keys = conf.getKeys();
+
+ Map<ColumnInfo, ExprNodeDesc> colToConstants = cppCtx.getPropagatedConstants(op);
+ for (int i = 0; i < keys.size(); i++) {
+ ExprNodeDesc key = keys.get(i);
+ ExprNodeDesc newkey = foldExpr(key, colToConstants, cppCtx, op, true);
+ keys.set(i, newkey);
+ }
+
+ ArrayList<AggregationDesc> aggrs = conf.getAggregators();
+
+ cppCtx.getOpToConstantExprs().put(op, colToConstants);
+
+ foldOperator(op, colToConstants);
+ return null;
+ }
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateGroupByProc class.
+ *
+ * @return ConstantPropagateGroupByProc
+ */
+ public static ConstantPropagateGroupByProc getGroupByProc() {
+ return new ConstantPropagateGroupByProc();
+ }
+
+ /**
+ * The Default Node Processor for Constant Propagation.
+ */
+ public static class ConstantPropagateDefaultProc implements NodeProcessor {
+ @SuppressWarnings("unchecked")
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ foldOperator(op, constants);
+ return null;
+ }
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateDefaultProc class.
+ *
+ * @return ConstantPropagateDefaultProc
+ */
+ public static ConstantPropagateDefaultProc getDefaultProc() {
+ return new ConstantPropagateDefaultProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for Select Operators.
+ */
+ public static class ConstantPropagateSelectProc implements NodeProcessor {
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ SelectOperator op = (SelectOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ List<ExprNodeDesc> colList = op.getConf().getColList();
+ if (colList != null) {
+ for (int i = 0; i < colList.size(); i++) {
+ ExprNodeDesc newCol = foldExpr(colList.get(i), constants, cppCtx, op, false);
+ colList.set(i, newCol);
+ }
+ }
+ foldOperator(op, constants);
+ LOG.debug("New column list:(" + StringUtils.join(colList, " ") + ")");
+ return null;
+ }
+ }
+
+ /**
+ * The Factory method to get the ConstantPropagateSelectProc class.
+ *
+ * @return ConstantPropagateSelectProc
+ */
+ public static ConstantPropagateSelectProc getSelectProc() {
+ return new ConstantPropagateSelectProc();
+ }
+
+ /**
+ * The Node Processor for constant propagation for FileSink Operators. In addition to constant
+ * propagation, this processor also prunes dynamic partitions to static partitions if possible.
+ */
+ public static class ConstantPropagateFileSinkProc implements NodeProcessor {
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ FileSinkOperator op = (FileSinkOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ foldOperator(op, constants);
+ FileSinkDesc fsdesc = op.getConf();
+ DynamicPartitionCtx dpCtx = fsdesc.getDynPartCtx();
+ if (dpCtx != null) {
+
+ // If all dynamic partitions are propagated as constant, remove DP.
+ Set<String> inputs = dpCtx.getInputToDPCols().keySet();
+
+ // Assume only 1 parent for FS operator
+ Operator<? extends Serializable> parent = op.getParentOperators().get(0);
+ Map<ColumnInfo, ExprNodeDesc> parentConstants = cppCtx
+ .getPropagatedConstants(parent);
+ RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver();
+ boolean allConstant = true;
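+ // For each dynamic-partition column, look up its ColumnInfo through the parent's RowResolver
+ // and check whether a constant has been propagated for it.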
+ for (String input : inputs) {
+ String tmp[] = rr.reverseLookup(input);
+ ColumnInfo ci = rr.get(tmp[0], tmp[1]);
+ if (parentConstants.get(ci) == null) {
+ allConstant = false;
+ break;
+ }
+ }
+ if (allConstant) {
+ pruneDP(fsdesc);
+ }
+ }
+ return null;
+ }
+
+ private void pruneDP(FileSinkDesc fsdesc) {
+ // FIXME: Support pruning dynamic partitioning.
+ LOG.info("DP can be rewritten to SP!");
+ }
+ }
+
+ public static NodeProcessor getFileSinkProc() {
+ return new ConstantPropagateFileSinkProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for Union Operators. Constant propagation stops at
+ * union operators.
+ */
+ public static class ConstantPropagateUnionProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ UnionOperator op = (UnionOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
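+ // Constants are not carried across the union; register an empty constant map for this operator.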
+ cppCtx.getOpToConstantExprs().put(op, new HashMap<ColumnInfo, ExprNodeDesc>());
+ return null;
+ }
+ }
+
+ public static NodeProcessor getUnionProc() {
+ return new ConstantPropagateUnionProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for ReduceSink Operators. If the ReduceSink feeds a
+ * join, only constants from inner-join tables, or from the 'inner side' of an outer join (the left
+ * table for a left outer join, and vice versa), can be propagated.
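+ * For example, with "a LEFT OUTER JOIN b", constants reaching the ReduceSink for a may be folded,
+ * while those reaching the ReduceSink for b are left untouched.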
+ */
+ public static class ConstantPropagateReduceSinkProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+ Object... nodeOutputs) throws SemanticException {
+ ReduceSinkOperator op = (ReduceSinkOperator) nd;
+ ReduceSinkDesc rsDesc = op.getConf();
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+
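+ // When this ReduceSink feeds a join, fold only if the join type allows constants from this
+ // input (identified by its tag) to propagate; see skipFolding() below.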
+ if (op.getChildOperators().size() == 1
+ && op.getChildOperators().get(0) instanceof JoinOperator) {
+ JoinOperator joinOp = (JoinOperator) op.getChildOperators().get(0);
+ if (skipFolding(joinOp.getConf(), op.getConf().getTag())) {
+ LOG.debug("Outer join in the future, skip folding " + op + ".");
+ cppCtx.getOpToConstantExprs().put(op, new HashMap());
+ return null;
+ }
+ }
+ // key columns
+ ArrayList<ExprNodeDesc> newKeyExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getKeyCols()) {
+ newKeyExprs.add(foldExpr(desc, constants, cppCtx, op, false));
+ }
+ rsDesc.setKeyCols(newKeyExprs);
+
+ // partition columns
+ ArrayList<ExprNodeDesc> newPartExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getPartitionCols()) {
+ newPartExprs.add(foldExpr(desc, constants, cppCtx, op, false));
+ }
+ rsDesc.setPartitionCols(newPartExprs);
+
+ // value columns
+ ArrayList<ExprNodeDesc> newValExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getValueCols()) {
+ newValExprs.add(foldExpr(desc, constants, cppCtx, op, false));
+ }
+ rsDesc.setValueCols(newValExprs);
+
+ foldOperator(op, constants);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ return null;
+ }
+
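+ /**
+ * Returns true when constants must not be folded for the join input identified by tag;
+ * tag 0 is the left-most table of the join and tag i + 1 is the right table of join condition i.
+ */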
+ private boolean skipFolding(JoinDesc joinDesc, int tag) {
+ JoinCondDesc[] conds = joinDesc.getConds();
+ int i;
+ for (i = conds.length - 1; i >= 0; i--) {
+ if (conds[i].getType() == JoinDesc.INNER_JOIN) {
+ if (tag == i + 1)
+ return false;
+ } else if (conds[i].getType() == JoinDesc.FULL_OUTER_JOIN) {
+ return true;
+ } else if (conds[i].getType() == JoinDesc.RIGHT_OUTER_JOIN) {
+ if (tag == i + 1)
+ return false;
+ return true;
+ } else if (conds[i].getType() == JoinDesc.LEFT_OUTER_JOIN) {
+ if (tag == i + 1)
+ return true;
+ }
+ }
+ if (tag == 0) {
+ return false;
+ }
+ return true;
+ }
+ }
+
+ public static NodeProcessor getReduceSinkProc() {
+ return new ConstantPropagateReduceSinkProc();
+ }
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 1558643)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (working copy)
@@ -52,11 +52,15 @@
transformations = new ArrayList<Transform>();
// Add the transformation that computes the lineage information.
transformations.add(new Generator());
+
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTPPD)) {
transformations.add(new PredicateTransitivePropagate());
transformations.add(new PredicatePushDown());
transformations.add(new PartitionPruner());
transformations.add(new PartitionConditionRemover());
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCONSTANTPROPAGATION)) {
+ transformations.add(new ConstantPropagate());
+ }
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTLISTBUCKETING)) {
/* Add list bucketing pruner. */
transformations.add(new ListBucketingPruner());
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (revision 1558643)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (working copy)
@@ -20,6 +20,7 @@
import java.io.Serializable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -215,4 +216,8 @@
return true;
}
+
+ public void setObjectinspector(ObjectInspector oi) {
+ this.objectInspector = oi;
+ }
}
Index: contrib/src/test/results/clientpositive/udf_example_add.q.out
===================================================================
--- contrib/src/test/results/clientpositive/udf_example_add.q.out (revision 1558643)
+++ contrib/src/test/results/clientpositive/udf_example_add.q.out (working copy)
@@ -38,19 +38,19 @@
alias: src
Select Operator
expressions:
- expr: example_add(1, 2)
+ expr: 3
type: int
- expr: example_add(1, 2, 3)
+ expr: 6
type: int
- expr: example_add(1, 2, 3, 4)
+ expr: 10
type: int
- expr: example_add(1.1, 2.2)
+ expr: 3.3000000000000003
type: double
- expr: example_add(1.1, 2.2, 3.3)
+ expr: 6.6
type: double
- expr: example_add(1.1, 2.2, 3.3, 4.4)
+ expr: 11.0
type: double
- expr: example_add(1, 2, 3, 4.4)
+ expr: 10.4
type: double
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Limit
Index: contrib/src/test/results/clientpositive/udf_example_format.q.out
===================================================================
--- contrib/src/test/results/clientpositive/udf_example_format.q.out (revision 1558643)
+++ contrib/src/test/results/clientpositive/udf_example_format.q.out (working copy)
@@ -32,13 +32,13 @@
alias: src
Select Operator
expressions:
- expr: example_format('abc')
+ expr: 'abc'
type: string
- expr: example_format('%1$s', 1.1)
+ expr: '1.1'
type: string
- expr: example_format('%1$s %2$e', 1.1, 1.2)
+ expr: '1.1 1.200000e+00'
type: string
- expr: example_format('%1$x %2$o %3$d', 10, 10, 10)
+ expr: 'a 12 10'
type: string
outputColumnNames: _col0, _col1, _col2, _col3
Limit
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1558643)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -873,8 +873,8 @@
HIVE_VECTORIZATION_GROUPBY_CHECKINTERVAL("hive.vectorized.groupby.checkinterval", 100000),
HIVE_VECTORIZATION_GROUPBY_MAXENTRIES("hive.vectorized.groupby.maxentries", 1000000),
HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1),
-
+
HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true),
// Whether to send the query plan via local resource or RPC
@@ -888,6 +888,8 @@
HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES("hive.explain.dependency.append.tasktype", false),
HIVECOUNTERGROUP("hive.counters.group.name", "HIVE"),
+
+ HIVEOPTCONSTANTPROPAGATION("hive.optimize.constant.propagation", true),
// none, column
// none is the default(past) behavior. Implies only alphaNumeric and underscore are valid characters in identifiers.
Index: hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
===================================================================
--- hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (revision 1558643)
+++ hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (working copy)
@@ -215,7 +215,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 11
type: int
expr: value
type: string
Index: hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
===================================================================
--- hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (revision 1558643)
+++ hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (working copy)
@@ -207,7 +207,7 @@
type: boolean
Filter Operator
predicate:
- expr: (key >= UDFToString((40 + 50)))
+ expr: (key >= '90')
type: boolean
Select Operator
expressions:
Index: hbase-handler/src/test/results/positive/hbase_pushdown.q.out
===================================================================
--- hbase-handler/src/test/results/positive/hbase_pushdown.q.out (revision 1558643)
+++ hbase-handler/src/test/results/positive/hbase_pushdown.q.out (working copy)
@@ -48,7 +48,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 90
type: int
expr: value
type: string
@@ -211,7 +211,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 90
type: int
expr: value
type: string
@@ -394,7 +394,7 @@
type: boolean
Select Operator
expressions:
- expr: key
+ expr: 90
type: int
expr: value
type: string