Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -69,10 +69,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -84,10 +89,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -106,10 +116,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -121,10 +136,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats8.q.out =================================================================== --- ql/src/test/results/clientpositive/stats8.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats8.q.out (working copy) @@ -77,9 +77,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, 
comment:default), ] @@ -89,15 +89,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart +POSTHOOK: query: describe formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -107,12 +136,42 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics PREHOOK: type: QUERY POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute 
statistics @@ -162,9 +221,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -174,12 +233,41 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics PREHOOK: type: QUERY POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics @@ -229,9 +317,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) 
+POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -241,12 +329,41 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics PREHOOK: type: QUERY POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics @@ -296,9 +413,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -308,12 +425,41 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: 
analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics PREHOOK: type: QUERY POSTHOOK: query: explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics @@ -375,9 +521,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -387,15 +533,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information 
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -405,15 +580,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -423,15 +627,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name 
data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 2000 + rawDataSize 21248 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -441,15 +674,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart +POSTHOOK: query: describe formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -459,9 +721,39 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE 
[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 4 + numPartitions 4 + numRows 2000 + rawDataSize 21248 + totalSize 23248 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -149,10 +149,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -164,10 +169,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -186,10 +196,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -201,10 +216,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- 
ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -252,10 +252,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -268,10 +273,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -290,10 +300,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -306,10 +321,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -781,10 +801,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -797,10 +822,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -819,10 +849,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -835,10 +870,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -4833,10 +4833,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -4848,10 +4853,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -4992,10 +5002,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -5007,10 +5022,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5029,10 +5049,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -5044,10 +5069,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5193,10 +5223,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -5208,10 +5243,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5230,10 +5270,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -5245,10 +5290,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats3.q.out =================================================================== --- ql/src/test/results/clientpositive/stats3.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats3.q.out (working copy) @@ -11,12 +11,90 @@ POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@hive_test_src +PREHOOK: query: explain extended +load data local inpath '../data/files/test.dat' overwrite into table hive_test_src +PREHOOK: type: LOAD +POSTHOOK: query: explain extended +load data local inpath '../data/files/test.dat' overwrite into table hive_test_src +POSTHOOK: type: LOAD +ABSTRACT SYNTAX TREE: + (TOK_LOAD '../data/files/test.dat' (TOK_TAB (TOK_TABNAME hive_test_src)) local overwrite) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + Stage-1 depends on stages: Stage-0 + Stage-2 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-0 + Copy +#### A masked pattern was here #### + + Stage: Stage-1 + Move Operator + tables: + replace: true +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns col1 + columns.types string +#### A masked pattern was here #### + name default.hive_test_src + serialization.ddl struct hive_test_src { string col1} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.hive_test_src +#### A masked pattern was here #### + + Stage: Stage-2 + Stats-Aggr Operator + + PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src PREHOOK: type: LOAD PREHOOK: Output: default@hive_test_src POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src POSTHOOK: type: LOAD POSTHOOK: Output: default@hive_test_src +PREHOOK: query: desc formatted hive_test_src +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted hive_test_src +POSTHOOK: type: DESCTABLE +# col_name data_type comment + +col1 string None + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 + totalSize 11 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile PREHOOK: type: CREATETABLE POSTHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile @@ -117,17 +195,47 @@ #### A masked pattern was here #### POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] -PREHOOK: query: describe extended hive_test_dst +PREHOOK: query: describe formatted hive_test_dst PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended hive_test_dst +POSTHOOK: query: describe formatted hive_test_dst POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] -col1 string -pcol1 string -pcol2 string +# col_name data_type comment +col1 string None + +# Partition Information +# col_name data_type comment + +pcol1 string None +pcol2 string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 1 + numRows 6 + rawDataSize 6 + totalSize 171 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: drop table hive_test_src PREHOOK: type: DROPTABLE PREHOOK: Input: 
default@hive_test_src Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -98,9 +98,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -112,9 +117,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -197,10 +207,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -212,10 +227,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -166,10 +166,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -181,10 +186,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -203,10 +213,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -218,10 +233,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy) @@ -148,10 +148,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -163,10 +168,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -185,10 +195,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -200,10 +215,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -222,10 +242,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -237,10 +262,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -259,10 +289,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -274,10 +309,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -73,9 +73,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -88,9 +93,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket Index: ql/src/test/results/clientpositive/describe_table.q.out =================================================================== --- ql/src/test/results/clientpositive/describe_table.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/describe_table.q.out (working copy) @@ -67,6 +67,11 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + numFiles 4 + numPartitions 4 + numRows 0 + rawDataSize 0 + totalSize 23248 #### A masked pattern was here #### # Storage Information @@ -109,6 +114,10 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -86,10 +86,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -101,10 +106,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -123,10 +133,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -138,10 +153,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -88,9 +88,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -103,9 +108,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket Index: ql/src/test/results/clientpositive/stats18.q.out =================================================================== --- ql/src/test/results/clientpositive/stats18.q.out (revision 0) +++ ql/src/test/results/clientpositive/stats18.q.out (working copy) @@ -0,0 +1,127 @@ +PREHOOK: query: create table stats_part like srcpart +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table stats_part like srcpart +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@stats_part +PREHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13 +POSTHOOK: query: insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src 
+POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13 +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: -- Load a file into a existing partition +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +desc formatted stats_part partition (ds='2010-04-08', hr='13') +PREHOOK: type: DESCTABLE +POSTHOOK: query: -- Load a file into a existing partition +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +desc formatted stats_part partition (ds='2010-04-08', hr='13') +POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2010-04-08, 13] +Database: default +Table: stats_part +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13') +PREHOOK: type: LOAD +PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13 +POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13') +POSTHOOK: type: LOAD +POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13 +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr='13') +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted stats_part partition (ds='2010-04-08', hr='13') +POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2010-04-08, 13] +Database: default +Table: stats_part +#### A masked pattern was here 
#### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 7170 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table stats_src +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table stats_src +POSTHOOK: type: DROPTABLE +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: drop table stats_part +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@stats_part +PREHOOK: Output: default@stats_part +POSTHOOK: query: drop table stats_part +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@stats_part +POSTHOOK: Output: default@stats_part +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/stats13.q.out =================================================================== --- ql/src/test/results/clientpositive/stats13.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats13.q.out (working copy) @@ -123,9 +123,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: desc extended analyze_srcpart +PREHOOK: query: desc formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart +POSTHOOK: query: desc formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -135,15 +135,45 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked 
pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11) +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -153,15 +183,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -171,15 +230,40 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: 
analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -189,15 +273,40 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart 
PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -207,12 +316,37 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: create table analyze_srcpart2 like analyze_srcpart PREHOOK: type: CREATETABLE POSTHOOK: query: create table analyze_srcpart2 like analyze_srcpart @@ -226,9 +360,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: desc extended analyze_srcpart2 +PREHOOK: query: desc formatted analyze_srcpart2 PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart2 +POSTHOOK: query: desc formatted analyze_srcpart2 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -238,9 +372,34 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern 
was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy) @@ -63,9 +63,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -77,9 +82,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy) @@ -157,10 +157,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -174,10 +179,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -197,10 +207,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -214,10 +229,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 
serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -405,10 +425,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -422,10 +447,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -445,10 +475,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -462,10 +497,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -60,9 +60,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -74,9 +79,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1370113) +++ 
ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -96,10 +96,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -111,10 +116,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -133,10 +143,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -148,10 +163,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/join17.q.out =================================================================== --- ql/src/test/results/clientpositive/join17.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join17.q.out (working copy) @@ -74,9 +74,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -88,9 +93,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -70,10 +70,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct 
srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -85,10 +90,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -107,10 +117,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -122,10 +137,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -155,10 +155,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -170,10 +175,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -173,9 +173,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -188,9 +193,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin Index: ql/src/test/results/clientpositive/stats5.q.out =================================================================== --- ql/src/test/results/clientpositive/stats5.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats5.q.out (working copy) @@ -36,11 +36,37 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@analyze_src POSTHOOK: Output: default@analyze_src -PREHOOK: query: describe extended analyze_src +PREHOOK: query: describe formatted analyze_src PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_src +POSTHOOK: query: describe formatted analyze_src POSTHOOK: type: DESCTABLE -key string -value string +# col_name data_type comment +key string None +value string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 0 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -93,9 +93,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -107,9 +112,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -566,9 +576,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -580,9 +595,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 
+ numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -197,10 +197,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -213,10 +218,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -684,10 +694,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -700,10 +715,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -1365,10 +1385,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1381,10 +1406,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.srcbucket_mapjoin_part Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -161,10 +161,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -176,10 +181,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -710,10 +720,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -725,10 +740,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats0.q.out =================================================================== --- ql/src/test/results/clientpositive/stats0.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats0.q.out (working copy) @@ -73,9 +73,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -87,9 +92,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -1391,9 +1401,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1405,9 +1420,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -74,9 +74,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -88,9 +93,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -109,10 +119,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -124,10 +139,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out =================================================================== --- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy) @@ -78,10 +78,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -93,10 +98,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 
partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -115,10 +125,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -130,10 +145,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -246,10 +266,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -261,10 +286,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -283,10 +313,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -298,10 +333,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -320,10 +360,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -335,10 +380,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -357,10 +407,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -372,10 +427,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -86,9 +86,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -101,9 +106,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket @@ -614,9 +624,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -629,9 +644,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket @@ -976,9 +996,14 @@ columns.types int:string #### A masked pattern was 
here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -991,9 +1016,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket @@ -1592,9 +1622,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1607,9 +1642,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket @@ -2051,9 +2091,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2066,9 +2111,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket @@ -2496,9 +2546,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2511,9 +2566,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 @@ -2530,9 +2590,14 @@ columns.types int:string #### A masked pattern was here #### name 
default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2545,9 +2610,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 @@ -2775,9 +2845,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2790,9 +2865,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket2 + numFiles 4 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -105,10 +105,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -120,10 +125,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -72,10 +72,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -87,10 +92,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -186,10 +196,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -201,10 +216,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy) @@ -169,10 +169,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -186,10 +191,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -377,10 +387,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -394,10 +409,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/stats10.q.out =================================================================== --- ql/src/test/results/clientpositive/stats10.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats10.q.out (working copy) @@ -430,9 +430,9 @@ POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended bucket3_1 partition (ds='1') +PREHOOK: query: describe formatted bucket3_1 partition (ds='1') PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended bucket3_1 partition (ds='1') +POSTHOOK: query: describe formatted bucket3_1 partition (ds='1') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -440,14 +440,43 @@ POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -key int -value string -ds string +# col_name data_type comment +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [1] +Database: default +Table: bucket3_1 #### A masked pattern was here #### -PREHOOK: query: describe extended bucket3_1 partition (ds='2') +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted bucket3_1 partition (ds='2') PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended bucket3_1 partition (ds='2') +POSTHOOK: query: describe formatted bucket3_1 partition (ds='2') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -455,14 +484,43 @@ POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -key int -value string -ds string +# col_name data_type 
comment +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [2] +Database: default +Table: bucket3_1 #### A masked pattern was here #### -PREHOOK: query: describe extended bucket3_1 +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 2 + numRows 1000 + rawDataSize 10624 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted bucket3_1 PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended bucket3_1 +POSTHOOK: query: describe formatted bucket3_1 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -470,8 +528,38 @@ POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -key int -value string -ds string +# col_name data_type comment +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 4 + numPartitions 2 + numRows 1000 + rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/udtf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udtf_explode.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/udtf_explode.q.out (working copy) @@ -67,9 +67,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -81,9 +86,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -143,9 +153,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -157,9 +172,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -395,9 +415,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -409,9 +434,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/binary_output_format.q.out =================================================================== --- ql/src/test/results/clientpositive/binary_output_format.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/binary_output_format.q.out (working copy) @@ -128,9 +128,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -142,9 +147,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/stats7.q.out =================================================================== --- ql/src/test/results/clientpositive/stats7.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats7.q.out (working copy) @@ -81,9 +81,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: 
analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -93,15 +93,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 1000 + rawDataSize 10624 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -111,15 +140,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# 
col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart +POSTHOOK: query: describe formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -129,9 +187,39 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 2 + numRows 1000 + rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -212,9 +212,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -227,9 +232,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + 
rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin @@ -691,9 +701,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -706,9 +721,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin Index: ql/src/test/results/clientpositive/stats2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats2.q.out (working copy) @@ -82,9 +82,9 @@ POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: desc extended analyze_t1 +PREHOOK: query: desc formatted analyze_t1 PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_t1 +POSTHOOK: query: desc formatted analyze_t1 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -94,12 +94,37 @@ POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat 
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: explain analyze table analyze_t1 partition (ds, hr) compute statistics PREHOOK: type: QUERY POSTHOOK: query: explain analyze table analyze_t1 partition (ds, hr) compute statistics @@ -161,9 +186,9 @@ POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_t1 +PREHOOK: query: describe formatted analyze_t1 PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_t1 +POSTHOOK: query: describe formatted analyze_t1 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -173,9 +198,39 @@ POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_t1 PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 4 + numPartitions 4 + numRows 2000 + rawDataSize 21248 + totalSize 23248 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -104,9 +104,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -118,9 +123,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 
+ numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -101,10 +101,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -116,10 +121,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -84,10 +84,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -99,10 +104,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -121,10 +131,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -136,10 +151,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -158,10 +178,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -173,10 +198,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -195,10 +225,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -210,10 +245,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -91,10 +91,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -106,10 +111,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -128,10 +138,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -143,10 +158,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -151,10 +151,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -166,10 +171,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -188,10 +198,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -203,10 +218,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats12.q.out =================================================================== --- ql/src/test/results/clientpositive/stats12.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats12.q.out (working copy) @@ -164,9 +164,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: desc extended analyze_srcpart +PREHOOK: query: desc formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: 
desc extended analyze_srcpart +POSTHOOK: query: desc formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -176,15 +176,45 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11) +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 2 + numRows 1000 + rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -194,15 +224,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A 
masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -212,15 +271,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 1000 + rawDataSize 10624 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -230,15 +318,40 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string 
default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12) +POSTHOOK: query: desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -248,9 +361,34 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -90,9 +90,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -104,9 +109,14 @@ columns.types string:string 
#### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -125,10 +135,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -140,10 +155,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -162,10 +182,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -177,10 +202,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -199,10 +229,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -214,10 +249,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -236,10 +276,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -251,10 +296,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -440,9 +490,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -454,9 +509,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -475,10 +535,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -490,10 +555,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -512,10 +582,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -527,10 +602,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -711,9 +791,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -725,9 +810,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -746,10 +836,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -761,10 +856,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -783,10 +883,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -798,10 +903,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -982,9 +1092,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -996,9 +1111,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -1017,10 +1137,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1032,10 +1157,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1054,10 +1184,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1069,10 +1204,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1091,10 +1231,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1106,10 +1251,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1128,10 +1278,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1143,10 +1298,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: 
ql/src/test/results/clientpositive/bucketcontext_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy) @@ -169,10 +169,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -186,10 +191,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -209,10 +219,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -226,10 +241,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -386,10 +406,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -403,10 +428,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -426,10 +456,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -443,10 +478,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 8 + numPartitions 2 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucket1.q.out (working copy) @@ -60,9 +60,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -74,9 +79,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -65,10 +65,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -80,10 +85,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -102,10 +112,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -117,10 +132,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1232,10 +1252,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1247,10 +1272,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1269,10 +1299,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1284,10 +1319,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1777,10 +1817,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1792,10 +1837,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1814,10 +1864,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1829,10 +1884,15 @@ columns.types string:string #### A masked 
pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats9.q.out =================================================================== --- ql/src/test/results/clientpositive/stats9.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats9.q.out (working copy) @@ -48,13 +48,39 @@ POSTHOOK: Output: default@analyze_srcbucket POSTHOOK: Lineage: analyze_srcbucket.key SIMPLE [(srcbucket)srcbucket.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: analyze_srcbucket.value SIMPLE [(srcbucket)srcbucket.FieldSchema(name:value, type:string, comment:null), ] -PREHOOK: query: describe extended analyze_srcbucket +PREHOOK: query: describe formatted analyze_srcbucket PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcbucket +POSTHOOK: query: describe formatted analyze_srcbucket POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcbucket.key SIMPLE [(srcbucket)srcbucket.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: analyze_srcbucket.value SIMPLE [(srcbucket)srcbucket.FieldSchema(name:value, type:string, comment:null), ] -key int -value string +# col_name data_type comment +key int None +value string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numPartitions 0 + numRows 1000 + rawDataSize 10603 + totalSize 11603 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/stats4.q.out =================================================================== --- ql/src/test/results/clientpositive/stats4.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats4.q.out (working copy) @@ -2315,9 +2315,9 @@ 400 val_400 2008-12-31 12 200 val_200 2008-12-31 12 97 val_97 2008-12-31 12 -PREHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=11) +PREHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=11) +POSTHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2327,15 +2327,44 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: nzhang_part1 #### A masked pattern was here #### -PREHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part1 partition(ds='2008-04-08',hr=12) +POSTHOOK: query: describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2345,15 +2374,44 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: nzhang_part1 #### A masked pattern was here #### -PREHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=11) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=11) +POSTHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: 
Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2363,15 +2421,44 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-12-31, 11] +Database: default +Table: nzhang_part2 #### A masked pattern was here #### -PREHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part2 partition(ds='2008-12-31',hr=12) +POSTHOOK: query: describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2381,15 +2468,44 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-12-31, 12] +Database: default +Table: nzhang_part2 #### A masked pattern was here #### -PREHOOK: query: describe extended nzhang_part1 +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat 
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted nzhang_part1 PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part1 +POSTHOOK: query: describe formatted nzhang_part1 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2399,15 +2515,45 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### -PREHOOK: query: describe extended nzhang_part2 +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 2 + numRows 1000 + rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted nzhang_part2 PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended nzhang_part2 +POSTHOOK: query: describe formatted nzhang_part2 POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -2417,12 +2563,42 @@ POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part2 PARTITION(ds=2008-12-31,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 2 + numRows 1000 + 
rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: drop table nzhang_part1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@nzhang_part1 Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -221,9 +221,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -235,9 +240,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -466,9 +466,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -481,9 +486,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin @@ -953,10 +963,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -969,10 +984,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct 
srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -88,9 +88,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -102,9 +107,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -123,10 +133,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -138,10 +153,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -160,10 +180,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -175,10 +200,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -359,9 +389,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -373,9 +408,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -394,10 +434,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -409,10 +454,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -431,10 +481,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -446,10 +501,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -468,10 +528,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -483,10 +548,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -505,10 +575,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 
partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -520,10 +595,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -711,9 +791,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -725,9 +810,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -746,10 +836,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -761,10 +856,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -783,10 +883,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -798,10 +903,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -820,10 +930,15 @@ columns.types string:string 
#### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -835,10 +950,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -857,10 +977,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -872,10 +997,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1060,9 +1190,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1074,9 +1209,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -1095,10 +1235,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1110,10 +1255,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1132,10 +1282,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1147,10 +1302,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/udf_java_method.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_java_method.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/udf_java_method.q.out (working copy) @@ -100,9 +100,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -114,9 +119,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -86,9 +86,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -101,9 +106,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket Index: ql/src/test/results/clientpositive/udf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_explode.q.out (revision 1370113) +++ 
ql/src/test/results/clientpositive/udf_explode.q.out (working copy) @@ -67,9 +67,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -81,9 +86,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -143,9 +153,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -157,9 +172,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -365,9 +385,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -379,9 +404,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -443,9 +473,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -457,9 +492,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: 
ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -94,10 +94,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -109,10 +114,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -131,10 +141,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -146,10 +161,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy) @@ -157,10 +157,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -174,10 +179,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -365,10 +375,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 1 + numRows 0 
partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -382,10 +397,15 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -60,9 +60,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -74,9 +79,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -71,10 +71,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -86,10 +91,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -108,10 +118,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -123,10 +138,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/input4.q.out =================================================================== --- ql/src/test/results/clientpositive/input4.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input4.q.out (working copy) @@ -15,6 +15,7 @@ STAGE DEPENDENCIES: Stage-0 is a root stage Stage-1 depends on stages: Stage-0 + Stage-2 depends on stages: Stage-1 STAGE PLANS: Stage: Stage-0 @@ -31,7 +32,10 @@ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.input4 + Stage: Stage-2 + Stats-Aggr Operator + PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4 PREHOOK: type: LOAD PREHOOK: Output: default@input4 Index: ql/src/test/results/clientpositive/stats6.q.out =================================================================== --- ql/src/test/results/clientpositive/stats6.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats6.q.out (working copy) @@ -64,9 +64,9 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -76,15 +76,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended 
analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -94,15 +123,44 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-08, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -112,15 +170,40 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 11] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12) +POSTHOOK: query: describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12) POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] @@ -130,15 +213,40 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Partition Information +Partition Value: [2008-04-09, 12] +Database: default +Table: analyze_srcpart #### A masked pattern was here #### -PREHOOK: query: describe extended analyze_srcpart +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe formatted analyze_srcpart PREHOOK: type: DESCTABLE -POSTHOOK: query: describe extended analyze_srcpart +POSTHOOK: query: describe formatted analyze_srcpart POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, 
type:string, comment:default), ] @@ -148,9 +256,39 @@ POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -key string default -value string default -ds string -hr string +# col_name data_type comment +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string None +hr string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 2 + numRows 1000 + rawDataSize 10624 + totalSize 11624 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -214,10 +214,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -230,10 +235,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 2 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -701,10 +711,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -717,10 +732,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part Index: ql/src/test/results/clientpositive/stats1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats1.q.out (working copy) @@ -185,13 +185,89 @@ 66 val_66 98 val_98 tst1 500 -PREHOOK: query: DESCRIBE EXTENDED tmptable +PREHOOK: query: DESCRIBE FORMATTED tmptable PREHOOK: type: DESCTABLE -POSTHOOK: query: DESCRIBE EXTENDED tmptable +POSTHOOK: query: DESCRIBE FORMATTED tmptable POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ] -key string -value string +# col_name data_type comment +key string None +value string None + +# Detailed Table Information +Database: default #### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numPartitions 0 + numRows 26 + rawDataSize 199 + totalSize 225 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: -- Load a file into a existing table +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable +PREHOOK: type: LOAD +PREHOOK: Output: default@tmptable +POSTHOOK: query: -- Load a file into a existing table +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable +POSTHOOK: type: LOAD +POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: DESCRIBE FORMATTED tmptable +PREHOOK: type: DESCTABLE +POSTHOOK: query: DESCRIBE FORMATTED tmptable +POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ] +# col_name data_type comment + +key string None +value string None + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Protect Mode: None +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 3 + numPartitions 0 + numRows 0 + rawDataSize 0 + totalSize 1583 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort 
Columns: [] +Storage Desc Params: + serialization.format 1 Index: ql/src/test/results/clientpositive/ctas.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/ctas.q.out (working copy) @@ -774,9 +774,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -788,9 +793,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -80,9 +80,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -94,9 +99,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -115,10 +125,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -130,10 +145,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -152,10 +172,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -167,10 +192,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -189,10 +219,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -204,10 +239,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -226,10 +266,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -241,10 +286,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -422,9 +472,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -436,9 +491,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -457,10 +517,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -472,10 +537,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -494,10 +564,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -509,10 +584,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -531,10 +611,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -546,10 +631,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -568,10 +658,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -583,10 +678,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: 
ql/src/test/results/clientpositive/udf_reflect.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/udf_reflect.q.out (working copy) @@ -96,9 +96,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -110,9 +115,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy) @@ -185,9 +185,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -200,9 +205,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -93,9 +93,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -108,9 +113,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket Index: 
ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -89,10 +89,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -104,10 +109,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -126,10 +136,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -141,10 +156,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -163,10 +183,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -178,10 +203,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -200,10 +230,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -215,10 +250,15 @@ 
columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -87,9 +87,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -101,9 +106,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -333,9 +343,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -347,9 +362,14 @@ columns.types string:string #### A masked pattern was here #### name default.src + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -88,9 +88,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -103,9 +108,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11603 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -20,6 +20,41 @@ POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@srcbucket_mapjoin_part +PREHOOK: query: explain +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') +PREHOOK: type: LOAD +POSTHOOK: query: explain +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') +POSTHOOK: type: LOAD +ABSTRACT SYNTAX TREE: + (TOK_LOAD '../data/files/srcbucket20.txt' (TOK_TAB (TOK_TABNAME srcbucket_mapjoin_part) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08'))) local) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + Stage-1 depends on stages: Stage-0 + Stage-2 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-0 + Copy +#### A masked pattern was here #### + + Stage: Stage-1 + Move Operator + tables: + partition: + ds 2008-04-08 + replace: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.srcbucket_mapjoin_part + + Stage: Stage-2 + Stats-Aggr Operator + + PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') PREHOOK: type: LOAD PREHOOK: Output: default@srcbucket_mapjoin_part @@ -27,24 +62,176 @@ POSTHOOK: type: LOAD POSTHOOK: Output: default@srcbucket_mapjoin_part POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 +PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +POSTHOOK: type: DESCTABLE +# col_name data_type comment + +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [2008-04-08] +Database: default +Table: srcbucket_mapjoin_part +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 1358 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') PREHOOK: type: LOAD PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') POSTHOOK: type: LOAD POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 +PREHOOK: query: desc formatted srcbucket_mapjoin_part 
partition(ds='2008-04-08') +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +POSTHOOK: type: DESCTABLE +# col_name data_type comment + +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [2008-04-08] +Database: default +Table: srcbucket_mapjoin_part +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 2750 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') PREHOOK: type: LOAD PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') POSTHOOK: type: LOAD POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 +PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +POSTHOOK: type: DESCTABLE +# col_name data_type comment + +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [2008-04-08] +Database: default +Table: srcbucket_mapjoin_part +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 3 + numRows 0 + rawDataSize 0 + totalSize 4200 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') PREHOOK: type: LOAD PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08') POSTHOOK: type: LOAD POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08 +PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +PREHOOK: type: DESCTABLE +POSTHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08') +POSTHOOK: type: DESCTABLE +# col_name data_type comment + +key int None +value string None + +# Partition Information +# col_name data_type comment + +ds string None + +# Detailed Partition Information +Partition Value: [2008-04-08] +Database: default +Table: srcbucket_mapjoin_part +#### A masked pattern was here #### +Protect Mode: None +#### A masked pattern was here #### +Partition Parameters: + numFiles 4 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# 
Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: 4 +Bucket Columns: [key] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE @@ -214,9 +401,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -229,9 +421,14 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin + numFiles 2 + numPartitions 0 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin @@ -701,10 +898,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -717,10 +919,15 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part + numFiles 4 + numPartitions 1 + numRows 0 partition_columns ds + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1370113) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -70,10 +70,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 1 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -85,10 +90,15 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 4 + numPartitions 4 + numRows 0 partition_columns ds/hr + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/queries/clientpositive/stats9.q =================================================================== --- ql/src/test/queries/clientpositive/stats9.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats9.q (working copy) @@ -6,4 +6,4 @@ explain analyze table analyze_srcbucket compute statistics; analyze table analyze_srcbucket compute statistics; -describe extended analyze_srcbucket; +describe formatted analyze_srcbucket; Index: ql/src/test/queries/clientpositive/stats10.q =================================================================== --- ql/src/test/queries/clientpositive/stats10.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats10.q (working copy) @@ -23,6 +23,6 @@ explain analyze table bucket3_1 partition (ds) compute statistics; analyze table bucket3_1 partition (ds) compute statistics; -describe extended bucket3_1 partition (ds='1'); -describe extended bucket3_1 partition (ds='2'); -describe extended bucket3_1; +describe formatted bucket3_1 partition (ds='1'); +describe formatted bucket3_1 partition (ds='2'); +describe formatted bucket3_1; Index: ql/src/test/queries/clientpositive/stats2.q =================================================================== --- ql/src/test/queries/clientpositive/stats2.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats2.q (working copy) @@ -12,10 +12,10 @@ insert overwrite table analyze_t1 partition (ds, hr) select * from srcpart where ds is not null; -desc extended analyze_t1; +desc formatted analyze_t1; explain analyze table analyze_t1 partition (ds, hr) compute statistics; analyze table analyze_t1 partition (ds, hr) compute statistics; -describe extended analyze_t1; +describe formatted analyze_t1; Index: ql/src/test/queries/clientpositive/stats12.q =================================================================== --- ql/src/test/queries/clientpositive/stats12.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats12.q (working copy) @@ -11,9 +11,9 @@ analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics; -desc extended analyze_srcpart; -desc extended analyze_srcpart partition (ds='2008-04-08', hr=11); -desc extended analyze_srcpart partition (ds='2008-04-08', hr=12); -desc extended analyze_srcpart partition (ds='2008-04-09', hr=11); -desc extended analyze_srcpart partition (ds='2008-04-09', hr=12); +desc formatted analyze_srcpart; +desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11); +desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12); +desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11); +desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12); Index: ql/src/test/queries/clientpositive/stats4.q =================================================================== --- ql/src/test/queries/clientpositive/stats4.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats4.q (working copy) @@ -28,13 +28,13 @@ select * from nzhang_part1 where ds is not null and hr is not null; select * from nzhang_part2 where ds is not null and hr is not null; -describe extended nzhang_part1 partition(ds='2008-04-08',hr=11); -describe extended nzhang_part1 partition(ds='2008-04-08',hr=12); -describe extended nzhang_part2 partition(ds='2008-12-31',hr=11); -describe extended nzhang_part2 
partition(ds='2008-12-31',hr=12); +describe formatted nzhang_part1 partition(ds='2008-04-08',hr=11); +describe formatted nzhang_part1 partition(ds='2008-04-08',hr=12); +describe formatted nzhang_part2 partition(ds='2008-12-31',hr=11); +describe formatted nzhang_part2 partition(ds='2008-12-31',hr=12); -describe extended nzhang_part1; -describe extended nzhang_part2; +describe formatted nzhang_part1; +describe formatted nzhang_part2; drop table nzhang_part1; drop table nzhang_part2; Index: ql/src/test/queries/clientpositive/stats6.q =================================================================== --- ql/src/test/queries/clientpositive/stats6.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats6.q (working copy) @@ -9,9 +9,9 @@ analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics; analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11); -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12); -describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11); -describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12); +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11); +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12); -describe extended analyze_srcpart; +describe formatted analyze_srcpart; Index: ql/src/test/queries/clientpositive/stats8.q =================================================================== --- ql/src/test/queries/clientpositive/stats8.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats8.q (working copy) @@ -8,26 +8,26 @@ explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics; analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11); -describe extended analyze_srcpart; +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11); +describe formatted analyze_srcpart; explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics; analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12); +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12); explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics; analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11); explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics; analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12); explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics; analyze table analyze_srcpart PARTITION(ds, hr) compute statistics; -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11); -describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12); -describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11); -describe extended analyze_srcpart 
PARTITION(ds='2008-04-09',hr=12); -describe extended analyze_srcpart; +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11); +describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=11); +describe formatted analyze_srcpart PARTITION(ds='2008-04-09',hr=12); +describe formatted analyze_srcpart; Index: ql/src/test/queries/clientpositive/stats18.q =================================================================== --- ql/src/test/queries/clientpositive/stats18.q (revision 0) +++ ql/src/test/queries/clientpositive/stats18.q (working copy) @@ -0,0 +1,21 @@ +set datanucleus.cache.collections=false; +set hive.stats.autogather=true; +set hive.merge.mapfiles=false; +set hive.merge.mapredfiles=false; +set hive.map.aggr=true; + +create table stats_part like srcpart; + +insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src; + +-- Load a file into a existing partition +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +desc formatted stats_part partition (ds='2010-04-08', hr='13'); + +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13'); + +desc formatted stats_part partition (ds='2010-04-08', hr='13'); + +drop table stats_src; +drop table stats_part; Index: ql/src/test/queries/clientpositive/stats1.q =================================================================== --- ql/src/test/queries/clientpositive/stats1.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats1.q (working copy) @@ -21,4 +21,10 @@ SELECT * FROM tmptable x SORT BY x.key, x.value; -DESCRIBE EXTENDED tmptable; +DESCRIBE FORMATTED tmptable; + +-- Load a file into a existing table +-- Some stats (numFiles, totalSize) should be updated correctly +-- Some other stats (numRows, rawDataSize) should be cleared +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable; +DESCRIBE FORMATTED tmptable; \ No newline at end of file Index: ql/src/test/queries/clientpositive/stats11.q =================================================================== --- ql/src/test/queries/clientpositive/stats11.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats11.q (working copy) @@ -6,10 +6,17 @@ load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin; CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE; +explain load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); + +desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08'); load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08'); load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08'); load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08'); +desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08'); CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds 
string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE; load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08'); Index: ql/src/test/queries/clientpositive/stats3.q =================================================================== --- ql/src/test/queries/clientpositive/stats3.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats3.q (working copy) @@ -4,8 +4,13 @@ drop table hive_test_dst; create table hive_test_src ( col1 string ) stored as textfile ; +explain extended load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ; +load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ; + +desc formatted hive_test_src; + create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile; insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src ; select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'; @@ -21,7 +26,7 @@ select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'; select * from hive_test_dst where pcol1='test_Part'; -describe extended hive_test_dst; +describe formatted hive_test_dst; drop table hive_test_src; drop table hive_test_dst; Index: ql/src/test/queries/clientpositive/stats13.q =================================================================== --- ql/src/test/queries/clientpositive/stats13.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats13.q (working copy) @@ -11,12 +11,12 @@ analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics; -desc extended analyze_srcpart; -desc extended analyze_srcpart partition (ds='2008-04-08', hr=11); -desc extended analyze_srcpart partition (ds='2008-04-08', hr=12); -desc extended analyze_srcpart partition (ds='2008-04-09', hr=11); -desc extended analyze_srcpart partition (ds='2008-04-09', hr=12); +desc formatted analyze_srcpart; +desc formatted analyze_srcpart partition (ds='2008-04-08', hr=11); +desc formatted analyze_srcpart partition (ds='2008-04-08', hr=12); +desc formatted analyze_srcpart partition (ds='2008-04-09', hr=11); +desc formatted analyze_srcpart partition (ds='2008-04-09', hr=12); create table analyze_srcpart2 like analyze_srcpart; -desc extended analyze_srcpart2; +desc formatted analyze_srcpart2; Index: ql/src/test/queries/clientpositive/stats5.q =================================================================== --- ql/src/test/queries/clientpositive/stats5.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats5.q (working copy) @@ -7,4 +7,4 @@ analyze table analyze_src compute statistics; -describe extended analyze_src; +describe formatted analyze_src; Index: ql/src/test/queries/clientpositive/stats15.q =================================================================== --- ql/src/test/queries/clientpositive/stats15.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats15.q (working copy) @@ -24,4 +24,4 @@ desc formatted stats_part; drop table stats_src; -drop table stats_part; \ No newline at end of file +drop table stats_part; Index: ql/src/test/queries/clientpositive/stats7.q =================================================================== --- ql/src/test/queries/clientpositive/stats7.q (revision 1370113) +++ ql/src/test/queries/clientpositive/stats7.q (working copy) @@ -10,7 +10,7 @@ analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics; -describe extended 
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
 
-describe extended analyze_srcpart;
+describe formatted analyze_srcpart;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1370113)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy)
@@ -329,6 +329,15 @@
             work.getAggKey(), atomic);
         statsAggregator.cleanUp(work.getAggKey());
       }
+      // The collectable stats for the aggregator need to be cleared.
+      // For example, if a file is being loaded, the old number of rows is no longer valid.
+      else if (work.isClearAggregatorStats()) {
+        for (String statType : collectableStats) {
+          if (parameters.containsKey(statType)) {
+            tblStats.setStat(statType, 0L);
+          }
+        }
+      }
     } else {
       // Partitioned table:
       // Need to get the old stats of the partition
@@ -368,7 +377,16 @@
             parameters, partitionID, atomic);
       } else {
         for (String statType : collectableStats) {
-          newPartStats.setStat(statType, currentValues.get(statType));
+          // The collectable stats for the aggregator need to be cleared.
+          // For example, if a file is being loaded, the old number of rows is no longer valid.
+          if (work.isClearAggregatorStats()) {
+            if (parameters.containsKey(statType)) {
+              newPartStats.setStat(statType, 0L);
+            }
+          }
+          else {
+            newPartStats.setStat(statType, currentValues.get(statType));
+          }
         }
       }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (revision 1370113)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (working copy)
@@ -30,12 +30,19 @@
 public class StatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private tableSpec tableSpecs; // source table spec -- for TableScanOperator
-  private LoadTableDesc loadTableDesc; // same as MoveWork.loadTableDesc -- for FileSinkOperator
-  private LoadFileDesc loadFileDesc; // same as MoveWork.loadFileDesc -- for FileSinkOperator
-  private String aggKey; // aggregation key prefix
-  private boolean statsReliable; // are stats completely reliable
+  private tableSpec tableSpecs;         // source table spec -- for TableScanOperator
+  private LoadTableDesc loadTableDesc;  // same as MoveWork.loadTableDesc -- for FileSinkOperator
+  private LoadFileDesc loadFileDesc;    // same as MoveWork.loadFileDesc -- for FileSinkOperator
+  private String aggKey;                // aggregation key prefix
+  private boolean statsReliable;        // are stats completely reliable
+  // If the stats aggregator is not present, clear the current aggregator stats.
+  // For example, if a merge is being performed, stats already collected by the aggregator
+  // (numRows etc.) are still valid. However, if a load file is being performed, the old stats
+  // collected by the aggregator are no longer valid, so it is better to clear them than to
+  // leave wrong, stale stats behind.
+  private boolean clearAggregatorStats = false;
+  private boolean noStatsAggregator = false;
 
   public StatsWork() {
@@ -93,4 +100,12 @@
   public void setStatsReliable(boolean statsReliable) {
     this.statsReliable = statsReliable;
   }
+
+  public boolean isClearAggregatorStats() {
+    return clearAggregatorStats;
+  }
+
+  public void setClearAggregatorStats(boolean clearAggregatorStats) {
+    this.clearAggregatorStats = clearAggregatorStats;
+  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (revision 1370113)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (working copy)
@@ -44,7 +44,7 @@
 import org.apache.hadoop.hive.ql.plan.CopyWork;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.hive.ql.plan.StatsWork;
 
 /**
  * LoadSemanticAnalyzer.
@@ -259,30 +259,49 @@
     LoadTableDesc loadTableWork = new LoadTableDesc(fromURI.toString(),
         loadTmpPath, Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite);
 
+    Task<? extends Serializable> childTask = TaskFactory.get(new MoveWork(getInputs(),
+        getOutputs(), loadTableWork, null, true), conf);
     if (rTask != null) {
-      rTask.addDependentTask(TaskFactory.get(new MoveWork(getInputs(),
-          getOutputs(), loadTableWork, null, true), conf));
+      rTask.addDependentTask(childTask);
     } else {
-      rTask = TaskFactory.get(new MoveWork(getInputs(), getOutputs(),
-          loadTableWork, null, true), conf);
+      rTask = childTask;
    }

     rootTasks.add(rTask);
+
+    // The user asked for stats to be collected.
+    // Some stats, like the number of rows, require a scan of the data;
+    // some other stats, like the number of files, do not require a complete scan.
+    // Update the stats which do not require a complete scan.
+    Task<? extends Serializable> statTask = null;
+    if (conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+      StatsWork statDesc = new StatsWork(loadTableWork);
+      statDesc.setNoStatsAggregator(true);
+      statDesc.setClearAggregatorStats(true);
+      statDesc.setStatsReliable(conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_RELIABLE));
+      statTask = TaskFactory.get(statDesc, conf);
+    }
+
+    // HIVE-3334 has been filed for load file with index auto update
     if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEINDEXAUTOUPDATE)) {
       IndexUpdater indexUpdater = new IndexUpdater(loadTableWork, getInputs(), conf);
       try {
         List<Task<? extends Serializable>> indexUpdateTasks = indexUpdater.generateUpdateTasks();
+
         for (Task<? extends Serializable> updateTask : indexUpdateTasks) {
-          //LOAD DATA will either have a copy & move or just a move, we always want the update to be dependent on the move
-          if (rTask.getChildren() == null || rTask.getChildren().size() == 0) {
-            rTask.addDependentTask(updateTask);
-          } else {
-            ((Task<? extends Serializable>)rTask.getChildren().get(0)).addDependentTask(updateTask);
+          //LOAD DATA will either have a copy & move or just a move,
+          // we always want the update to be dependent on the move
+          childTask.addDependentTask(updateTask);
+          if (statTask != null) {
+            updateTask.addDependentTask(statTask);
          }
        }
      } catch (HiveException e) {
        console.printInfo("WARNING: could not auto-update stale indexes, indexes are not out of sync");
      }
    }
+    else if (statTask != null) {
+      childTask.addDependentTask(statTask);
+    }
  }
}